Diff of the two buildlogs:
--
--- b1/build.log 2024-12-29 01:57:40.846928999 +0000
+++ b2/build.log 2024-12-29 02:20:13.257090527 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Fri Jan 30 20:11:47 -12 2026
-I: pbuilder-time-stamp: 1769847107
+I: Current time: Sun Dec 29 15:57:57 +14 2024
+I: pbuilder-time-stamp: 1735437477
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/unstable-reproducible-base.tgz]
 I: copying local configuration
@@ -26,54 +26,86 @@
 dpkg-source: info: applying 02_parse_h_dependency
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/44334/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/D01_modify_environment starting
+debug: Running on ionos2-i386.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 Dec 29 01:58 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build/reproducible-path'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='i386'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=22 '
- DISTRIBUTION='unstable'
- HOME='/root'
- HOST_ARCH='i386'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="2" [2]="37" [3]="1" [4]="release" [5]="i686-pc-linux-gnu")
+ BASH_VERSION='5.2.37(1)-release'
+ BUILDDIR=/build/reproducible-path
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=i386
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=10 '
+ DIRSTACK=()
+ DISTRIBUTION=unstable
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=i686
+ HOST_ARCH=i386
 IFS=' 
 '
- INVOCATION_ID='6f89422fd77642db8e4f2b100a95f1a6'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- LD_LIBRARY_PATH='/usr/lib/libeatmydata'
- LD_PRELOAD='libeatmydata.so'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='44334'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=0e459c401fc340dcb9a65914be385aea
+ LANG=C
+ LANGUAGE=de_CH:de
+ LC_ALL=C
+ LD_LIBRARY_PATH=/usr/lib/libeatmydata
+ LD_PRELOAD=libeatmydata.so
+ MACHTYPE=i686-pc-linux-gnu
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnu
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=31488
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.Is5X30OB/pbuilderrc_4xQz --distribution unstable --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.Is5X30OB/b1 --logfile b1/build.log bison_3.8.2+dfsg-1.dsc'
- SUDO_GID='112'
- SUDO_UID='107'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://213.165.73.152:3128'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.Is5X30OB/pbuilderrc_0C9g --distribution unstable --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.Is5X30OB/b2 --logfile b2/build.log bison_3.8.2+dfsg-1.dsc'
+ SUDO_GID=112
+ SUDO_UID=107
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://46.16.76.132:3128
 I: uname -a
- Linux ionos16-i386 6.1.0-28-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.119-1 (2024-11-22) x86_64 GNU/Linux
+ Linux i-capture-the-hostname 6.1.0-28-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.119-1 (2024-11-22) x86_64 GNU/Linux
 I: ls -l /bin
- lrwxrwxrwx 1 root root 7 Nov 22 2024 /bin -> usr/bin
-I: user script /srv/workspace/pbuilder/44334/tmp/hooks/D02_print_environment finished
+ lrwxrwxrwx 1 root root 7 Nov 22 14:40 /bin -> usr/bin
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/D02_print_environment finished
 -> Attempting to satisfy build-dependencies
 -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -150,7 +182,7 @@
 Get: 31 http://deb.debian.org/debian unstable/main i386 po-debconf all 1.0.21+nmu1 [248 kB]
 Get: 32 http://deb.debian.org/debian unstable/main i386 debhelper all 13.23 [919 kB]
 Get: 33 http://deb.debian.org/debian unstable/main i386 help2man i386 1.49.3 [198 kB]
-Fetched 21.2 MB in 0s (57.5 MB/s)
+Fetched 21.2 MB in 4s (5584 kB/s)
 Preconfiguring packages ...
 Selecting previously unselected package m4.
 (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19844 files and directories currently installed.)
@@ -302,7 +334,11 @@
 fakeroot is already the newest version (1.36.2-1).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/A99_set_merged_usr starting
+Not re-configuring usrmerge for unstable
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/A99_set_merged_usr finished
+hostname: Name or service not known
+I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes
 dpkg-buildpackage: info: source package bison
 dpkg-buildpackage: info: source version 2:3.8.2+dfsg-1
 dpkg-buildpackage: info: source distribution unstable
@@ -913,7 +949,7 @@
 /build/reproducible-path/bison-3.8.2+dfsg/src/getargs.c
 make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg'
 dh_auto_build
- make -j22
+ make -j10
 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
 rm -f examples/c/reccalc/scan.stamp examples/c/reccalc/scan.stamp.tmp
 rm -f lib/alloca.h-t lib/alloca.h && \
@@ -921,6 +957,7 @@
 sed -e 's|@''HAVE_ALLOCA_H''@|1|g' < ./lib/alloca.in.h; \
 } > lib/alloca.h-t && \
 mv -f lib/alloca.h-t lib/alloca.h
+/usr/bin/mkdir -p examples/c/reccalc
 rm -f lib/configmake.h-t && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
 echo '#if HAVE_WINSOCK2_H'; \
@@ -956,6 +993,7 @@
 echo '#define PKGLIBEXECDIR "/usr/libexec/bison"'; \
 } | sed '/""/d' > lib/configmake.h-t && \
 mv -f lib/configmake.h-t lib/configmake.h
+touch examples/c/reccalc/scan.stamp.tmp
 rm -f lib/fcntl.h-t lib/fcntl.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY!
*/'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -982,7 +1020,7 @@ < ./lib/fcntl.in.h; \ } > lib/fcntl.h-t && \ mv lib/fcntl.h-t lib/fcntl.h -/usr/bin/mkdir -p examples/c/reccalc +flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/iconv.h-t lib/iconv.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1031,7 +1069,6 @@ < ./lib/inttypes.in.h; \ } > lib/inttypes.h-t && \ mv lib/inttypes.h-t lib/inttypes.h -touch examples/c/reccalc/scan.stamp.tmp rm -f lib/textstyle.h-t lib/textstyle.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ cat ./lib/textstyle.in.h; \ @@ -1076,7 +1113,6 @@ < ./lib/locale.in.h; \ } > lib/locale.h-t && \ mv lib/locale.h-t lib/locale.h -flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/math.h-t lib/math.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1417,6 +1453,15 @@ < ./lib/signal.in.h; \ } > lib/signal.h-t && \ mv lib/signal.h-t lib/signal.h +rm -f lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|__always_inline|inline _GL_ATTRIBUTE_ALWAYS_INLINE|g' \ + -e 's|__glibc_likely|_GL_LIKELY|g' \ + -e 's|__glibc_unlikely|_GL_UNLIKELY|g' \ + -e '/libc_hidden_proto/d' \ + < ./lib/malloc/scratch_buffer.h; \ +} > lib/malloc/scratch_buffer.gl.h-t && \ +mv lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h rm -f lib/spawn.h-t lib/spawn.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1465,15 +1510,6 @@ < ./lib/spawn.in.h; \ } > lib/spawn.h-t && \ mv lib/spawn.h-t lib/spawn.h -rm -f lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|__always_inline|inline _GL_ATTRIBUTE_ALWAYS_INLINE|g' \ - -e 's|__glibc_likely|_GL_LIKELY|g' \ - -e 's|__glibc_unlikely|_GL_UNLIKELY|g' \ - -e '/libc_hidden_proto/d' \ - < ./lib/malloc/scratch_buffer.h; \ -} > lib/malloc/scratch_buffer.gl.h-t && \ -mv lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h rm -f lib/stdio.h-t lib/stdio.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1825,8 +1861,6 @@ < ./lib/string.in.h; \ } > lib/string.h-t && \ mv lib/string.h-t lib/string.h -mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp -/usr/bin/mkdir -p lib/sys /usr/bin/mkdir -p lib/sys /usr/bin/mkdir -p lib/sys rm -f lib/sys/ioctl.h-t lib/sys/ioctl.h && \ @@ -1846,7 +1880,6 @@ < ./lib/sys_ioctl.in.h; \ } > lib/sys/ioctl.h-t && \ mv lib/sys/ioctl.h-t lib/sys/ioctl.h -/usr/bin/mkdir -p lib/sys rm -f lib/sys/resource.h-t lib/sys/resource.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1863,6 +1896,8 @@ < ./lib/sys_resource.in.h; \ } > lib/sys/resource.h-t && \ mv -f lib/sys/resource.h-t lib/sys/resource.h +mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp +/usr/bin/mkdir -p lib/sys rm -f lib/sys/stat.h-t lib/sys/stat.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1960,6 +1995,32 @@ } > lib/sys/times.h-t && \ mv lib/sys/times.h-t lib/sys/times.h /usr/bin/mkdir -p lib/sys +rm -f lib/sys/types.h-t lib/sys/types.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_TYPES_H''@||g' \ + -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ + -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ + < ./lib/sys_types.in.h; \ +} > lib/sys/types.h-t && \ +mv lib/sys/types.h-t lib/sys/types.h +/usr/bin/mkdir -p lib/sys +rm -f lib/sys/wait.h-t lib/sys/wait.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_WAIT_H''@||g' \ + -e 's/@''GNULIB_WAITPID''@/1/g' \ + -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ + -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ + < ./lib/sys_wait.in.h; \ +} > lib/sys/wait.h-t && \ +mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/termios.h-t lib/termios.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1975,18 +2036,6 @@ < ./lib/termios.in.h; \ } > lib/termios.h-t && \ mv lib/termios.h-t lib/termios.h -rm -f lib/sys/types.h-t lib/sys/types.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_TYPES_H''@||g' \ - -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ - -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ - < ./lib/sys_types.in.h; \ -} > lib/sys/types.h-t && \ -mv lib/sys/types.h-t lib/sys/types.h rm -f lib/time.h-t lib/time.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2032,19 +2081,6 @@ < ./lib/time.in.h; \ } > lib/time.h-t && \ mv lib/time.h-t lib/time.h -rm -f lib/sys/wait.h-t lib/sys/wait.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_WAIT_H''@||g' \ - -e 's/@''GNULIB_WAITPID''@/1/g' \ - -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ - -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ - < ./lib/sys_wait.in.h; \ -} > lib/sys/wait.h-t && \ -mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/unistd.h-t lib/unistd.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2500,10 +2536,6 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lalr.o `test -f 'src/lalr.c' || echo './'`src/lalr.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-location.o `test -f 'src/location.c' || echo './'`src/location.c gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lr0.o `test -f 'src/lr0.c' || echo './'`src/lr0.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lssi.o `test -f 'src/lssi.c' || echo './'`src/lssi.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-main.o `test -f 'src/main.c' || echo './'`src/main.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-muscle-tab.o `test -f 'src/muscle-tab.c' || echo './'`src/muscle-tab.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-named-ref.o `test -f 'src/named-ref.c' || echo './'`src/named-ref.c src/lalr.c: In function 'set_goto_map': src/lalr.c:152:26: warning: format '%ld' expects argument of type 'long int', but argument 5 has type 'goto_number' {aka 'unsigned int'} [-Wformat=] 152 | fprintf (stderr, "goto_map[%d (%s)] = %ld .. %ld\n", @@ -2521,6 +2553,10 @@ | ~~~~~~~~~~~~~~~~~ | | | goto_number {aka unsigned int} +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lssi.o `test -f 'src/lssi.c' || echo './'`src/lssi.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-main.o `test -f 'src/main.c' || echo './'`src/main.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-muscle-tab.o `test -f 'src/muscle-tab.c' || echo './'`src/muscle-tab.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-named-ref.o `test -f 'src/named-ref.c' || echo './'`src/named-ref.c gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-nullable.o `test -f 'src/nullable.c' || echo './'`src/nullable.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-output.o `test -f 'src/output.c' || echo './'`src/output.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-parse-gram.o `test -f 'src/parse-gram.c' || echo './'`src/parse-gram.c @@ -2562,17 +2598,6 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c -lib/canonicalize.c: In function 'canonicalize_filename_mode': -lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] - 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." - | ^~~~~~~ -lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] - 485 | #warning "See ." - | ^~~~~~~ lib/careadlinkat.c: In function 'careadlinkat': lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." @@ -2580,7 +2605,6 @@ lib/careadlinkat.c:179:5: warning: #warning "See ." [-Wcpp] 179 | #warning "See ." | ^~~~~~~ -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr] 182 | return readlink_stk (fd, filename, buffer, buffer_size, alloc, | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2589,6 +2613,18 @@ lib/careadlinkat.c:181:8: note: declared here 181 | char stack_buf[STACK_BUF_SIZE]; | ^~~~~~~~~ +lib/canonicalize.c: In function 'canonicalize_filename_mode': +lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] + 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." + | ^~~~~~~ +lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] + 485 | #warning "See ." + | ^~~~~~~ +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c @@ -2636,8 +2672,8 @@ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -if /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ - /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ +if /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ + /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ --include=./doc/bison.x \ --output=doc/bison.1.tmp tests/bison && \ { sed 's/^\(\.TH[^"]*"[^"]*"[^"]*\)"[^"]*"/\1/' doc/bison.1 >doc/bison.1a.tmp || true; } && \ @@ -2665,7 +2701,7 @@ make[2]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' dh_auto_test - make -j22 check "TESTSUITEFLAGS=-j22 --verbose" VERBOSE=1 + make -j10 check "TESTSUITEFLAGS=-j10 --verbose" VERBOSE=1 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' if test -d ./.git \ && git --version >/dev/null 2>&1; then \ @@ -2702,81 +2738,81 @@ ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make examples/c/calc/calc examples/c/glr/c++-types examples/c/lexcalc/lexcalc examples/c/mfcalc/mfcalc examples/c/pushcalc/calc examples/c/reccalc/reccalc examples/c/rpcalc/rpcalc examples/c++/calc++/calc++ examples/c++/glr/c++-types examples/c++/simple examples/c++/variant examples/c++/variant-11 ./tests/bison tests/atconfig tests/atlocal make[4]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' -/bin/bash ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h 
`echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex -/bin/bash ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex +/bin/sh ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror 
-Wall,dangling-alias --report=all --no-lines +updating examples/c/lexcalc/parse.output +updating examples/c/lexcalc/parse.h +updating examples/c/mfcalc/mfcalc.output +updating examples/c/mfcalc/mfcalc.h +updating examples/c/glr/c++-types.output +updating examples/c/glr/c++-types.h +/bin/sh ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines rm -f examples/c++/calc++/parser.stamp \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex -rm -f examples/c++/glr/c++-types.stamp +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex touch examples/c++/calc++/parser.stamp.tmp -\ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -touch examples/c++/glr/c++-types.stamp.tmp +rm -f examples/c++/glr/c++-types.stamp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy -\ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +touch examples/c++/glr/c++-types.stamp.tmp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/glr/c++-types.cc examples/c++/glr/c++-types.yy \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ 
-e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +\ +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +\ +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines make[4]: 'tests/bison' is up to date. make[4]: Nothing to be done for 'tests/atconfig'. make[4]: 'tests/atlocal' is up to date. -updating examples/c/lexcalc/parse.output +gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c updating examples/c/calc/calc.output -updating examples/c/rpcalc/rpcalc.output -updating examples/c/lexcalc/parse.h -updating examples/c/pushcalc/calc.output updating examples/c/calc/calc.h -mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c +mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c +updating examples/c/pushcalc/calc.output +updating examples/c/rpcalc/rpcalc.output +updating examples/c/pushcalc/calc.h updating examples/c/reccalc/parse.output -gcc -DEXEEXT=\"\" -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c -updating examples/c/rpcalc/rpcalc.h -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc -g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc -gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c -updating examples/c/mfcalc/mfcalc.output updating examples/c++/simple.output -updating examples/c/pushcalc/calc.h +gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c +updating examples/c/rpcalc/rpcalc.h gcc -DEXEEXT=\"\" -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c updating examples/c/reccalc/parse.h -gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c -gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c -updating examples/c/mfcalc/mfcalc.h -updating examples/c++/simple.hh -gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c -updating examples/c/glr/c++-types.output -g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc -updating examples/c++/variant.output -updating examples/c/glr/c++-types.h mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp +gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c +updating examples/c++/variant.output updating examples/c++/variant.hh -gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant-variant.o `test -f 'examples/c++/variant.cc' || echo './'`examples/c++/variant.cc -g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc +gcc -DEXEEXT=\"\" -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c +gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c +gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm +gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c updating examples/c++/variant-11.output updating examples/c++/variant-11.hh +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc +updating examples/c++/simple.hh +g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc +g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant_11-variant-11.o `test -f 'examples/c++/variant-11.cc' || echo './'`examples/c++/variant-11.cc -gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm -gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/calc/calc examples/c/calc/examples_c_calc_calc-calc.o +gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/glr/c++-types examples/c/glr/examples_c_glr_c___types-c++-types.o +gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/pushcalc/calc examples/c/pushcalc/examples_c_pushcalc_calc-calc.o gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/reccalc/reccalc examples/c/reccalc/examples_c_reccalc_reccalc-parse.o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o -gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o -gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/glr/c++-types examples/c/glr/examples_c_glr_c___types-c++-types.o -g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant-11 examples/c++/variant_11-variant-11.o -g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/simple examples/c++/simple-simple.o +gcc -g -O2 -Werror=implicit-function-declaration -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o +g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/simple examples/c++/simple-simple.o +g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant-11 examples/c++/variant_11-variant-11.o g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/calc++/calc++ examples/c++/calc++/calc__-driver.o examples/c++/calc++/calc__-scanner.o examples/c++/calc++/calc__-calc++.o examples/c++/calc++/calc__-parser.o g++ -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/glr/c++-types examples/c++/glr/examples_c___glr_c___types-c++-types.o make[4]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' @@ -2793,9 +2829,9 @@ echo 'm4_define([AT_PACKAGE_BUGREPORT], [bug-bison@gnu.org])'; \ } >tests/package.m4.tmp mv tests/package.m4.tmp tests/package.m4 -\ - /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' +\ + /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' Making all in po make[6]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg/po' @@ -2812,25 +2848,25 @@ Making all in . make[6]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' PASS: examples/c/glr/c++-types.test -PASS: examples/c++/variant.test -PASS: examples/c++/simple.test -PASS: examples/c++/glr/c++-types.test -PASS: examples/c++/variant-11.test PASS: examples/c/mfcalc/mfcalc.test -/usr/bin/mkdir -p doc PASS: examples/c/lexcalc/lexcalc.test PASS: examples/c/calc/calc.test -PASS: examples/c/pushcalc/calc.test PASS: examples/c/rpcalc/rpcalc.test +/usr/bin/mkdir -p doc +PASS: examples/c/reccalc/reccalc.test +PASS: examples/c/pushcalc/calc.test +PASS: examples/c++/simple.test +PASS: examples/c++/glr/c++-types.test LC_ALL=C tests/bison --version >doc/bison.help.tmp +PASS: examples/c++/variant.test +PASS: examples/c++/variant-11.test LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -PASS: examples/c/reccalc/reccalc.test +PASS: examples/c++/calc++/calc++.test make[6]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' -PASS: examples/c++/calc++/calc++.test ============================================================================ Testsuite summary for GNU Bison 3.8.2 ============================================================================ @@ -2845,7 +2881,7 @@ make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' "/usr/bin/perl" -pi -e 's/\@tb\@/\t/g' tests/testsuite.tmp mv tests/testsuite.tmp tests/testsuite -/bin/bash ./tests/testsuite -C tests -j22 --verbose +/bin/sh ./tests/testsuite -C tests -j10 --verbose ## --------------------------- ## ## GNU Bison 3.8.2 test suite. ## ## --------------------------- ## @@ -2859,49 +2895,29 @@ - - - - - - - - - - - - -7. input.at:204: testing Yacc warnings ... 
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y
+1. m4.at:21: testing Generating Comments ...
+./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y
3. input.at:58: testing Invalid options ...
./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y
4. input.at:83: testing Invalid inputs ...
./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77
-11. input.at:401: testing Dangling aliases ...
+7. input.at:204: testing Yacc warnings ...
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y
+10. input.at:341: testing Redefining the error token ...
+./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y
+2. input.at:27: testing Invalid number of arguments ...
+./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret
+./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y
+./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y
5. input.at:147: testing Invalid inputs with {} ...
./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y
+9. input.at:287: testing Invalid symbol declarations ...
+./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y
8. input.at:238: testing Yacc's %type ...
./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y
6. input.at:173: testing Yacc warnings on symbols ...
./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y
-./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y
-2. input.at:27: testing Invalid number of arguments ...
-./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret
-1. m4.at:21: testing Generating Comments ...
-./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y -10. input.at:341: testing Redefining the error token ... -./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -9. input.at:287: testing Invalid symbol declarations ... -./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -12. input.at:427: testing Symbol declarations ... -./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y -13. input.at:528: testing Invalid $n and @n ... stderr: -17. input.at:794: testing Symbol redeclared ... -14. input.at:552: testing Type Clashes ... -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y bison: invalid argument 'error=caret' for '--feature' Valid arguments are: - 'none' @@ -2909,27 +2925,9 @@ - 'fixit', 'diagnostics-parseable-fixits' - 'syntax-only' - 'all' -18. input.at:832: testing EOF redeclared ... -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y -15. input.at:774: testing Unused values ... -21. input.at:970: testing Per-type %printer and %destructor redeclared ... -16. input.at:784: testing Unused values before symbol declarations ... -./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=error=itemsets input.y -20. input.at:899: testing Default %printer and %destructor redeclared ... -./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -22. input.at:1013: testing Undefined symbols ... 
-./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -19. input.at:859: testing Symbol class redefinition ... -5. input.at:147: ./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y - ok -./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y stderr: -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton -input.y:1.11: error: invalid null character +5. input.at:147: input.y:1.11: error: invalid null character 1 | %header "ð€ˆ" | ^ input.y:2.1-2: error: invalid characters: '\0\001\002\377?' @@ -2962,9 +2960,9 @@ input.y:10.1-11.0: error: missing '%}' at end of file 10 | %{ | ^~ -stderr: + ok ./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr -./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: bison: invalid argument 'error=itemsets' for '--report' Valid arguments are: - 'none' @@ -2974,20 +2972,12 @@ - 'solved' - 'counterexamples', 'cex' - 'all' +./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror?all input.y +./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton 9. input.at:287: ok stderr: -21. input.at:970: ok -14. input.at:552: ok -bison: option '--skeleton' requires an argument -Try 'bison --help' for more information. -./input.at:43: sed -e \ - "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ - stderr -22. input.at:1013: ok -4. input.at:83: ok stderr: -13. input.at:528: ok bison: invalid argument 'error?all' for '--warning' Valid arguments are: - 'all' @@ -3003,120 +2993,50 @@ - 'other' - 'precedence' - 'yacc' -./m4.at:55: cat output.txt +4. input.at:83: ok +bison: option '--skeleton' requires an argument +Try 'bison --help' for more information. +./input.at:43: sed -e \ + "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ + stderr 3. input.at:58: ok -./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -19. input.at:859: ok 2. input.at:27: ok -1. m4.at:21: ok - -23. input.at:1045: testing Unassociated types used for a printer or destructor ... 
-./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y - -20. input.at:899: ok -25. input.at:1139: testing Unused values with default %destructor ... - -24. input.at:1074: testing Useless printers or destructors ... -26. input.at:1187: testing Unused values with per-type %destructor ... -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -27. input.at:1219: testing Duplicate string ... -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +11. input.at:401: testing Dangling aliases ... +13. input.at:528: testing Invalid $n and @n ... +12. input.at:427: testing Symbol declarations ... +./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y +14. input.at:552: testing Type Clashes ... +./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y +15. input.at:774: testing Unused values ... +./m4.at:55: cat output.txt +13. input.at:528: 14. input.at:552: ./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y + ok + ok +1. m4.at:21: ok -28. input.at:1247: testing Token collisions ... -./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -29. input.at:1275: testing Incompatible Aliases ... -./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -31. input.at:1569: testing Typed symbol aliases ... -32. input.at:1609: testing Require 1.0 ... -./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -30. input.at:1400: testing Torturing the Scanner ... 
-./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:468: cat symbols.csv -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -33. input.at:1610: testing Require 3.8.2 ... -28. input.at:1247: ok -./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +17. input.at:794: testing Symbol redeclared ... +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +16. input.at:784: testing Unused values before symbol declarations ... +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -34. input.at:1612: testing Require 100.0 ... -./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -12. input.at:427: ok -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -stderr: -input.y:9.10-16: error: require bison 100.0, but have 3.8.2 +18. input.at:832: testing EOF redeclared ... +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -34. 
input.at:1612: ok -./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y - -./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -35. input.at:1619: testing String aliases for character tokens ... -./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -36. input.at:1642: testing Symbols ... -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -stderr: -input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO FOO - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO FOO - | ^~~ -input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:2.8-10: note: previous declaration - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] - 3 | %token EOF 0 EOF 0 - | ^~~ -input.y:3.8-10: note: previous declaration - 3 | %token EOF 0 EOF 0 - | ^~~ -37. input.at:1708: testing Numbered tokens ... -./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y -stderr: -stderr: -stderr: -31. 
input.at:1569: input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO BAR FOO 0 - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO BAR FOO 0 - | ^~~ - ok -stderr: -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror -input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] - 2 | %type "bar" - | ^~~~~ -input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] - 4 | expr: "foo" "bar" "baz" - | ^~~~~ -./input.at:804: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +./input.at:468: cat symbols.csv +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +12. input.at:427: ok stderr: -33. input.at:1610: ok -./input.at:843: sed 's,.*/$,,' stderr 1>&2 input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 1 | %nterm exp | ^~~~~~ @@ -3129,14 +3049,19 @@ input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] 4 | exp: "number"; | ^~~~~~~~ -./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -32. input.at:1609: ok -./input.at:410: sed 's,.*/$,,' stderr 1>&2 -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -stderr: +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror ./input.at:182: sed 's,.*/$,,' stderr 1>&2 +19. input.at:859: testing Symbol class redefinition ... +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +19. input.at:859: ok + +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +20. 
input.at:899: testing Default %printer and %destructor redeclared ... +./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +stderr: +./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc] 1 | %destructor {} | ^~~~~~~~~~~ @@ -3149,17 +3074,15 @@ input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc] 7 | b: %empty { $$ = 42; }; | ^~~~~~ - -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error -38. input.at:1750: testing Unclosed constructs ... -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error - -./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -./input.at:216: sed 's,.*/$,,' stderr 1>&2 stderr: -39. input.at:1805: testing %start after first rule ... +input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO BAR FOO 0 + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO BAR FOO 0 + | ^~~ +stderr: +stderr: input.y:2.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 2 | %nterm nterm1 | ^~~~~~ @@ -3184,13 +3107,217 @@ input.y:10.9-16: error: POSIX Yacc does not support string literals [-Werror=yacc] 10 | nterm3: "TOKEN3" | ^~~~~~~~ -38. input.at:1750: ok -40. input.at:1826: testing Duplicate %start symbol ... +./input.at:843: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: sed 's,.*/$,,' stderr 1>&2 +input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] + 2 | %type "bar" + | ^~~~~ +input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] + 4 | expr: "foo" "bar" "baz" + | ^~~~~ +20. 
input.at:899: ok +stderr: +input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO FOO + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO FOO + | ^~~ +input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:2.8-10: note: previous declaration + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] + 3 | %token EOF 0 EOF 0 + | ^~~ +input.y:3.8-10: note: previous declaration + 3 | %token EOF 0 EOF 0 + | ^~~ ./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -35. input.at:1619: stderr: -29. input.at:1275: ok +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:410: sed 's,.*/$,,' stderr 1>&2 +./input.at:253: sed 's,.*/$,,' stderr 1>&2 +./input.at:804: sed 's,.*/$,,' stderr 1>&2 + +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +21. input.at:970: testing Per-type %printer and %destructor redeclared ... +./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +21. input.at:970: ok + +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +22. input.at:1013: testing Undefined symbols ... 
+./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +22. input.at:1013: ok +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none + +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +23. input.at:1045: testing Unassociated types used for a printer or destructor ... +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +18. input.at:832: ./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none ok +6. 
input.at:173: ok + +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + +24. input.at:1074: testing Useless printers or destructors ... +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +25. input.at:1139: testing Unused values with default %destructor ... +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +11. input.at:401: ok +7. input.at:204: ok +17. input.at:794: ok + + + +26. input.at:1187: testing Unused values with per-type %destructor ... +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +27. input.at:1219: testing Duplicate string ... +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +stderr: +28. input.at:1247: testing Token collisions ... +./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] +input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +8. input.at:238: ok +28. input.at:1247: ok +./input.at:1062: sed 's,.*/$,,' stderr 1>&2 +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + + +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +29. input.at:1275: testing Incompatible Aliases ... +./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +30. input.at:1400: testing Torturing the Scanner ... 
+./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror +./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +stderr: +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | 
g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.66-68: error: unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +stderr: input.y:6.8-45: error: unset value: $$ [-Werror=other] 6 | start: end end tagged tagged { $1; $3; } ; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3201,14 +3328,121 @@ 7 | end: { } ; | ^~~ stderr: -./input.at:253: sed 's,.*/$,,' stderr 1>&2 -stderr: -input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] -input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] - +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: 
error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.66-68: error: unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 
[-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +./input.at:785: sed 's,.*/$,,' stderr 1>&2 ./input.at:1152: sed 's,.*/$,,' stderr 1>&2 -./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:775: sed 's,.*/$,,' stderr 1>&2 +./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +stderr: input.y:6.8-22: error: unset value: $$ [-Werror=other] 6 | start: end end { $1; } ; | ^~~~~~~~~~~~~~~ @@ -3218,52 +3452,100 @@ input.y:7.6-8: error: unset value: $$ [-Werror=other] 7 | end: { } ; | ^~~ +29. input.at:1275: ok +stderr: +input.y:16.13-19: error: useless %printer for type [-Werror=other] +input.y:17.16-22: error: useless %destructor for type [-Werror=other] +stderr: +./input.at:1199: sed 's,.*/$,,' stderr 1>&2 +input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] -./input.at:1062: sed 's,.*/$,,' stderr 1>&2 +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1085: sed 's,.*/$,,' stderr 1>&2 +./input.at:1236: sed 's,.*/$,,' stderr 1>&2 +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error +31. 
input.at:1569: testing Typed symbol aliases ... +./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +31. input.at:1569: ok + +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +23. input.at:1045: ok +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +32. input.at:1609: testing Require 1.0 ... +./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none +33. input.at:1610: testing Require 3.8.2 ... +./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +stderr: +32. input.at:1609: ok +26. input.at:1187: ok +stderr: + +27. input.at:1219: ok +33. 
input.at:1610: ok +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +34. input.at:1612: testing Require 100.0 ... + +./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none + + +stderr: +input.y:9.10-16: error: require bison 100.0, but have 3.8.2 +35. input.at:1619: testing String aliases for character tokens ... +./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +34. input.at:1612: ok +37. input.at:1708: testing Numbered tokens ... +./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y +36. input.at:1642: testing Symbols ... +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y + +38. input.at:1750: testing Unclosed constructs ... +./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +38. input.at:1750: ok +35. input.at:1619: ok + + +39. input.at:1805: testing %start after first rule ... +./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +40. input.at:1826: testing Duplicate %start symbol ... 
+./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Werror -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +stderr: +input.y:6.23-28: error: unused value: $4 [-Werror=other] +input.y:8.9-11: error: unset value: $$ [-Werror=other] +./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y +./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +37. input.at:1708: ok +39. input.at:1805: ok + 41. input.at:1895: testing %prec takes a token ... ./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1199: sed 's,.*/$,,' stderr 1>&2 -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +41. input.at:1895: ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + ok 42. input.at:1916: testing %prec's token must be defined ... ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error + +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none 43. input.at:1936: testing Reject unused %code qualifiers ... 
./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none stderr: -41. input.at:1895: ok -input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1236: sed 's,.*/$,,' stderr 1>&2 - -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error -39. input.at:1805: ok -stderr: -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -input.y:16.13-19: error: useless %printer for type [-Werror=other] -input.y:17.16-22: error: useless %destructor for type [-Werror=other] -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -44. input.at:2025: testing Multiple %code ... 
-./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1085: sed 's,.*/$,,' stderr 1>&2 -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -37. input.at:1708: ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror - ok -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -stderr: -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc] input.y:10.21-34: error: POSIX Yacc does not support string literals [-Werror=yacc] @@ -3272,24 +3554,8 @@ input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc] input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc] input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc] - -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -45. input.at:2065: testing errors ... 
-./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none -./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y ./input.at:1666: sed 's,.*/$,,' stderr 1>&2 -./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y -./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -46. input.at:2102: testing %define, --define, --force-define ... -./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ - -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ - -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ - -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ - --skeleton ./skel.c input.y input.y:1.12-14: error: duplicate directive [-Werror=other] 1 | %start exp exp exp | ^~~ @@ -3303,81 +3569,75 @@ 1 | %start exp exp exp | ^~~ input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +input.y:3.13-14: error: useless %printer for type <> [-Werror=other] ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error -./input.at:2123: cat input.tab.c -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none ./input.at:1836: sed 's,.*/$,,' stderr 1>&2 -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1116: sed 's,.*/$,,' stderr 1>&2 +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y stderr: input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other] -./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; 
export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none ./input.at:1925: sed 's,.*/$,,' stderr 1>&2 -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -17. input.at:794: ok -11. input.at:401: ok -./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y -18. input.at:832: ok -45. input.at:2065: ok -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -6. input.at:173: ok ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - - -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none - - -./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y -47. input.at:2170: testing "%define" Boolean variables ... -./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y -48. input.at:2191: testing "%define" code variables ... -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -stderr: -49. input.at:2224: testing "%define" keyword variables ... -stdout: -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:391: $PREPARSER ./input -stderr: - -50. input.at:2257: testing "%define" enum variables ... 
-./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -8. input.at:238: ok -26. input.at:1187: ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none -10. input.at:341: ok - ok -./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -7. input.at:204: ok -51. input.at:2320: testing "%define" file variables ... -./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none - -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - - -46. input.at:2102: ok +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y +25. input.at:1139: ok +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -52. input.at:2342: testing "%define" backward compatibility ... -./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -23. 
input.at:1045: ok +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none +44. input.at:2025: testing Multiple %code ... +./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +42. input.at:1916: ok +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y -53. input.at:2393: testing Unused api.pure ... -./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -54. input.at:2429: testing C++ namespace reference errors ... -./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -52. input.at:2342: ok -47. input.at:2170: stderr: - ok -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +45. input.at:2065: testing errors ... 
+./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y +./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y +./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y stderr: -stdout: +input.y:1.16-18: error: duplicate directive [-Werror=other] + 1 | %start exp foo exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo exp + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:1859: sed 's,.*/$,,' stderr 1>&2 +45. input.at:2065: ok +46. input.at:2102: testing %define, --define, --force-define ... +./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ + -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ + -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ + -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ + --skeleton ./skel.c input.y +./input.at:2123: cat input.tab.c +./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +stderr: +stderr: +input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3393,6 +3653,12 @@ input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] 13 | b: INT | %empty; | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ input.y:14.10-62: error: unset value: $$ [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3402,6 +3668,9 @@ input.y:14.43-45: error: unused value: $5 [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { 
$1; } INT { $2; }; + | ^~~ input.y:15.10-49: error: unset value: $$ [-Werror=other] 15 | d: INT | INT { } INT { $1; } INT { $2; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3426,9 +3695,15 @@ input.y:18.10-12: error: unused value: $1 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ input.y:18.31-33: error: unused value: $3 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ input.y:18.52-54: error: unused value: $5 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ @@ -3441,9 +3716,15 @@ input.y:19.31-33: error: unused value: $3 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:19.66-68: error: unused value: $5 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ input.y:22.10-68: error: unset value: $$ [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3453,6 +3734,9 @@ input.y:22.14-16: error: unused value: $2 [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:25.23-25: error: unset value: $$ [-Werror=other] 25 | n: INT | INT { } INT { } INT { }; | ^~~ @@ -3483,21 +3767,6 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -55. input.at:2482: testing Bad character literals ... -./input.at:2484: -set x `LC_ALL=C ls -l 'empty.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 -./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c -56. input.at:2543: testing Bad escapes in literals ... -./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 -./input.at:785: sed 's,.*/$,,' stderr 1>&2 - -./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y -57. input.at:2582: testing Unexpected end of file ... 
-./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror stderr: input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; @@ -3514,6 +3783,12 @@ input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] 13 | b: INT | %empty; | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ input.y:14.10-62: error: unset value: $$ [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3523,6 +3798,9 @@ input.y:14.43-45: error: unused value: $5 [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ input.y:15.10-49: error: unset value: $$ [-Werror=other] 15 | d: INT | INT { } INT { $1; } INT { $2; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3547,9 +3825,15 @@ input.y:18.10-12: error: unused value: $1 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ input.y:18.31-33: error: unused value: $3 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ input.y:18.52-54: error: unused value: $5 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ @@ -3562,9 +3846,15 @@ input.y:19.31-33: error: unused value: $3 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:19.66-68: error: unused value: $5 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ input.y:22.10-68: error: unset value: $$ [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3574,6 +3864,9 @@ input.y:22.14-16: error: unused value: $2 [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:25.23-25: error: unset value: $$ [-Werror=other] 25 | n: INT | INT { } INT { } INT { }; | ^~~ @@ -3604,219 +3897,321 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:776: sed 's,.*/$,,' stderr 1>&2 +./input.at:1124: sed 's,.*/$,,' stderr 1>&2 +./input.at:786: sed 's,.*/$,,' stderr 1>&2 +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +43. input.at:1936: ok +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none + +47. input.at:2170: testing "%define" Boolean variables ... +./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y +46. input.at:2102: ok + +stderr: +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +stdout: +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +48. input.at:2191: testing "%define" code variables ... 
+./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +36. input.at:1642: ok +47. input.at:2170: ok + + +49. input.at:2224: testing "%define" keyword variables ... +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +50. input.at:2257: testing "%define" enum variables ... +24. input.at:1074: ok +./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +stderr: +stdout: +./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c + +./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +51. input.at:2320: testing "%define" file variables ... +./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror ./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -56. input.at:2543: ok stderr: -./input.at:775: sed 's,.*/$,,' stderr 1>&2 -0+0 records in -0+0 records out -0 bytes copied, 5.604e-05 s, 0.0 kB/s -27. input.at:1219: stdout: +stdout: +./input.at:391: $PREPARSER ./input +stderr: +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +51. 
input.at:2320: stdout: + ok +stderr: +./input.at:2055: $PREPARSER ./input +./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +10. input.at:341: ok +./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS +44. input.at:2025: + ok +50. input.at:2257: ok +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + +52. input.at:2342: testing "%define" backward compatibility ... +./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + + +53. input.at:2393: testing Unused api.pure ... +./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +52. input.at:2342: ok +54. input.at:2429: testing C++ namespace reference errors ... +55. input.at:2482: testing Bad character literals ... +./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2484: +set x `LC_ALL=C ls -l 'empty.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 +stderr: + +stdout: +./input.at:1558: $PREPARSER ./input +stderr: +stderr: +./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] +input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] +input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] +input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] +input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] +stderr: +56. input.at:2543: testing Bad escapes in literals ... +input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 +./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +30. 
input.at:1400: ok +./input.at:2246: sed 's,.*/$,,' stderr 1>&2 +./input.at:2213: sed 's,.*/$,,' stderr 1>&2 +56. input.at:2543: stderr: ok +input.y:2.8-10: error: duplicate directive [-Werror=other] + 2 | %start exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error + +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1877: sed 's,.*/$,,' stderr 1>&2 + +57. input.at:2582: testing Unexpected end of file ... +./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stderr: +58. input.at:2675: testing LAC: Errors for %define ... ./input.at:2591: set x `LC_ALL=C ls -l 'char.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none - -51. input.at:2320: ok -./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y -58. input.at:2675: testing LAC: Errors for %define ... 
./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none - -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +0+0 records in +0+0 records out +0 bytes copied, 5.8252e-05 s, 0.0 kB/s +stdout: +./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y stderr: -50. input.at:2257: ok -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror 0+0 records in 0+0 records out -0 bytes copied, 4.6035e-05 s, 0.0 kB/s +0 bytes copied, 5.4745e-05 s, 0.0 kB/s stdout: - +./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y ./input.at:2508: set x `LC_ALL=C ls -l 'two.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77 -./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y - -60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... -./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y - -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -59. input.at:2719: testing -Werror combinations ... -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stderr: +0+0 records in +0+0 records out +0 bytes copied, 4.8228e-05 s, 0.0 kB/s ./input.at:2604: set x `LC_ALL=C ls -l 'escape-in-char.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77 -stderr: -61. input.at:2793: testing Redefined %union name ... 
-./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -0+0 records in -0+0 records out -0 bytes copied, 3.4587e-05 s, 0.0 kB/s stdout: -./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y -62. input.at:2840: testing Stray $ or @ ... -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y stderr: +./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y 0+0 records in 0+0 records out -0 bytes copied, 5.1219e-05 s, 0.0 kB/s +0 bytes copied, 5.4277e-05 s, 0.0 kB/s stdout: ./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y +./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:2522: set x `LC_ALL=C ls -l 'three.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 -63. input.at:2883: testing Code injection ... -./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -stderr: -stderr: -stdout: -stderr: -input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] -stderr: -stdout: -input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] -input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] -input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] -input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] -input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS -./input.at:2055: $PREPARSER ./input -stderr: -0+0 records in -0+0 records out -0 bytes copied, 4.1111e-05 s, 0.0 kB/s -stdout: -stderr: -42. 
input.at:1916: ok -./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y -./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y -./input.at:2246: sed 's,.*/$,,' stderr 1>&2 -44. input.at:2025: ok -./input.at:2213: sed 's,.*/$,,' stderr 1>&2 ./input.at:2617: set x `LC_ALL=C ls -l 'string.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 - -stderr: -55. input.at:2482: ok -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -input.y:6.23-28: error: unused value: $4 [-Werror=other] -input.y:8.9-11: error: unset value: $$ [-Werror=other] -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error - -./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y +./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none stderr: 0+0 records in 0+0 records out -0 bytes copied, 4.9214e-05 s, 0.0 kB/s +0 bytes copied, 4.0811e-05 s, 0.0 kB/s stdout: - -./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -64. input.at:2946: testing Deprecated directives ... 
-./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y -./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:3019: cp errors-all experr -65. input.at:3077: testing Unput's effect on locations ... -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror stderr: -./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +0+0 records in +0+0 records out +0 bytes copied, 5.6227e-05 s, 0.0 kB/s stdout: -./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y -./input.at:1558: $PREPARSER ./input -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -66. input.at:3113: testing Non-deprecated directives ... 
-stderr: -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y -./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y +./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y +./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none ./input.at:2630: set x `LC_ALL=C ls -l 'escape-in-string.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77 -30. input.at:1400: ok -65. input.at:3077: stderr: - ok +55. 
input.at:2482: ok +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +stderr: + 0+0 records in 0+0 records out -0 bytes copied, 5.1081e-05 s, 0.0 kB/s +0 bytes copied, 6.9086e-05 s, 0.0 kB/s stdout: ./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-string.y -stderr: -input.y:2.15: error: stray '$' [-Werror=other] - -./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y ./input.at:2643: set x `LC_ALL=C ls -l 'tstring.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77 -./input.at:2727: sed 's,.*/$,,' stderr 1>&2 -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -stderr: -stderr: - -input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:1.8-10: note: previous definition -input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:3.8-10: note: previous definition -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -input.y:1.16-18: error: duplicate directive [-Werror=other] - 1 | %start exp foo exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo exp - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +59. input.at:2719: testing -Werror combinations ... +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none stderr: -67. input.at:3148: testing Cannot type action ... 
-./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none 0+0 records in 0+0 records out -0 bytes copied, 4.7824e-05 s, 0.0 kB/s -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +0 bytes copied, 7.081e-05 s, 0.0 kB/s stdout: -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none ./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y -./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./input.at:2808: sed 's,.*/$,,' stderr 1>&2 -./input.at:1859: sed 's,.*/$,,' stderr 1>&2 -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror ./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none ./input.at:2656: set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 -68. input.at:3171: testing Character literals and api.token.raw ... 
-./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +stderr: +0+0 records in +0+0 records out +0 bytes copied, 4.9583e-05 s, 0.0 kB/s +stdout: +./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +57. input.at:2582: ok +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y + +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... +./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y +./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y +40. input.at:1826: ok +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +stderr: +input.y:2.15: error: stray '$' [-Werror=other] + +./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y +./input.at:2727: sed 's,.*/$,,' stderr 1>&2 +61. input.at:2793: testing Redefined %union name ... 
+./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y +15. input.at:774: ok +53. input.at:2393: ok +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none +./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + +./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y + +62. input.at:2840: testing Stray $ or @ ... +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y +stderr: +input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] +input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] +input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] +input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] +input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] +63. input.at:2883: testing Code injection ... +16. 
input.at:784: ok +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +./input.at:2247: sed 's,.*/$,,' stderr 1>&2 + +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +stderr: +input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:1.8-10: note: previous definition +input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:3.8-10: note: previous definition +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y +./input.at:2808: sed 's,.*/$,,' stderr 1>&2 +64. input.at:2946: testing Deprecated directives ... +./input.at:3019: cp errors-all experr ./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +stderr: +./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y +./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y +input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./input.at:2214: sed 's,.*/$,,' stderr 1>&2 +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y +./input.at:2685: COLUMNS=1000; export 
COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +60. input.at:2764: ok +./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none + +./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +65. input.at:3077: testing Unput's effect on locations ... +./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +65. input.at:3077: ok stderr: -./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror input.y:11.19: error: stray '$' [-Werror=other] input.y:11.23: error: stray '@' [-Werror=other] input.y:12.19: error: stray '$' [-Werror=other] @@ -3826,1176 +4221,935 @@ input.y:16.19: error: stray '$' [-Werror=other] input.y:16.23: error: stray '@' [-Werror=other] input.y:17.19: error: stray '$' [-Werror=other] -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -68. input.at:3171: ok -stderr: -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -43. 
input.at:1936: ok -0+0 records in -0+0 records out -0 bytes copied, 5.1377e-05 s, 0.0 kB/s -stdout: -./input.at:2861: sed 's,.*/$,,' stderr 1>&2 -./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2861: sed 's,.*/$,,' stderr 1>&2 +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +66. input.at:3113: testing Non-deprecated directives ... +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: -input.y:3.13-14: error: useless %printer for type <> [-Werror=other] -./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y -57. 
input.at:2582: ok -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none - -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1116: sed 's,.*/$,,' stderr 1>&2 -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -69. input.at:3205: testing %token-table and parse.error ... -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - -70. input.at:3231: testing Invalid file prefix mapping arguments ... -./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -stderr: -input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] -input.y:13.1-18: note: previous declaration -input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y -./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y -71. named-refs.at:22: testing Tutorial calculator ... 
-./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:3133: sed 's,.*/$,,' stderr 1>&2 +input.y:2.15: error: stray '$' [-Werror=other] +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +./input.at:2730: sed 's,.*/$,,' stderr 1>&2 ./input.at:3027: rm -f output.c +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error ./input.at:3028: cp input.y input.y.orig -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:3029: sed -e '/fix-it/d' experr -./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y ./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr ./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: -stderr: +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no 
-fno-caret input.yy --warnings=none -Werror --trace=none +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none ./input.at:3034: diff input.y.orig input.y~ -input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] - 10 | exp: {} - | ^~~~~~~~ -stdout: -60. input.at:2764: ok ./input.at:3037: test ! -f output.c -./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y ./input.at:3040: sed -e '1,8d' input.y -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:3156: sed 's,.*/$,,' stderr 1>&2 -./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y -./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -25. input.at:1139: ok -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y +54. input.at:2429: ok ./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +49. input.at:2224: ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + ok +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -36. input.at:1642: ok -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -70. 
input.at:3231: ok -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -72. named-refs.at:196: testing Undefined and ambiguous references ... +67. input.at:3148: testing Cannot type action ... +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y +68. input.at:3171: testing Character literals and api.token.raw ... +./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none +68. input.at:3171: ok +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none + +48. input.at:2191: ok +stderr: +input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] +input.y:13.1-18: note: previous declaration +input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +64. input.at:2946: ok +69. input.at:3205: testing %token-table and parse.error ... +./input.at:3133: sed 's,.*/$,,' stderr 1>&2 +./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none +61. 
input.at:2793: ok +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error + +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +70. input.at:3231: testing Invalid file prefix mapping arguments ... +./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y +71. named-refs.at:22: testing Tutorial calculator ... +./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y +72. named-refs.at:196: testing Undefined and ambiguous references ... +./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y +./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +72. named-refs.at:196: ok +./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y +70. input.at:3231: ok +62. input.at:2840: ok ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y 73. named-refs.at:297: testing Misleading references ... 
-./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y +input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] + 10 | exp: {} + | ^~~~~~~~ + +./input.at:3156: sed 's,.*/$,,' stderr 1>&2 +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y 74. named-refs.at:316: testing Many kinds of errors ... +./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error 75. named-refs.at:551: testing Missing identifiers in brackets ... ./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y -72. named-refs.at:196: ok ./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y 75. named-refs.at:551: ok -64. 
input.at:2946: ok -./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none - -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y - -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +74. named-refs.at:316: ok +./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y 69. input.at:3205: ok -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror 76. named-refs.at:567: testing Redundant words in brackets ... -74. named-refs.at:316: ok + ./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y + +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +76. named-refs.at:567: ok 77. named-refs.at:583: testing Comments in brackets ... 
./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -53. input.at:2393: 78. named-refs.at:599: testing Stray symbols in brackets ... - ok +78. named-refs.at:599: testing Stray symbols in brackets ... ./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 -76. named-refs.at:567: ok -./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none 77. named-refs.at:583: ok -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror +./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +78. named-refs.at:599: ok +79. named-refs.at:618: testing Redundant words in LHS brackets ... +stderr: +./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] +test.y:11.8-10: note: refers to: $foo at $1 +test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 +79. named-refs.at:618: 80. named-refs.at:635: testing Factored LHS ... + ok +./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +66. input.at:3113: ok +./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -78. 
named-refs.at:599: ok -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y 81. named-refs.at:648: testing Unresolved references ... -79. named-refs.at:618: testing Redundant words in LHS brackets ... ./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error +stderr: 82. named-refs.at:715: testing $ or @ followed by . or - ... -./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y - -./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -80. named-refs.at:635: testing Factored LHS ... -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror -./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -81. named-refs.at:648: ok -79. named-refs.at:618: ok -./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 83. output.at:68: testing Output files: -dv ... +input.y:2.15: error: stray '$' [-Werror=other] +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +58. 
input.at:2675: ok ./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv foo.y -stderr: -84. output.at:74: testing Output files: -dv >&- ... -./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac -input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] -input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] -input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] -input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] -input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] +./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y +81. named-refs.at:648: ok +./input.at:2733: sed 's,.*/$,,' stderr 1>&2 -62. input.at:2840: ok -./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y -./input.at:2247: sed 's,.*/$,,' stderr 1>&2 +80. named-refs.at:635: ok -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y +84. output.at:74: testing Output files: -dv >&- ... +./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac -66. input.at:3113: ok 85. output.at:81: testing Output files: -dv -o foo.c ... -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y +./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y ./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.c foo.y -stderr: 86. output.at:84: testing Output files: -dv -y ... 
-test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] -test.y:11.8-10: note: refers to: $foo at $1 -test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 - -stderr: ./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +stderr: ./output.at:68: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none +67. input.at:3148: ok +83. output.at:68: ok + stderr: -input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./output.at:81: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 + +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror +./output.at:81: grep '#include "foo.h"' foo.c stderr: -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error 87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... -./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -80. named-refs.at:635: ok -88. output.at:92: testing Output files: -dv -o foo.tab.c ... -67. 
input.at:3148: ok -./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y -input.y:2.15: error: stray '$' [-Werror=other] -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -stderr: -./input.at:2214: sed 's,.*/$,,' stderr 1>&2 -./input.at:2730: sed 's,.*/$,,' stderr 1>&2 ./output.at:74: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +stdout: +88. output.at:92: testing Output files: -dv -o foo.tab.c ... +#include "foo.h" +./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y +./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +84. output.at:74: 85. output.at:81: ok + ok -83. output.at:68: ok -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error -84. output.at:74: - ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none 89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... -89. output.at:95: 90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... -stderr: +89. output.at:95: stderr: +test.y:4.9: error: stray '$' [-Werror=other] +test.y:5.9: error: stray '@' [-Werror=other] +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... 
skipped (output.at:95) -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y -./output.at:81: find . -type f | +90. output.at:97: stderr: +./output.at:84: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -90. output.at:97: 61. input.at:2793: ok -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none skipped (output.at:97) - -91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... -./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y +./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 stderr: -./output.at:81: grep '#include "foo.h"' foo.c -foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] -./output.at:87: find . -type f | +./output.at:92: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error +./output.at:84: grep '#include "y.tab.h"' y.tab.c +stdout: +91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... stderr: -stderr: -./output.at:84: find . -type f | +88. output.at:92: ok +86. output.at:84: ok +foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] +./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y +./output.at:87: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stdout: -#include "foo.h" 92. output.at:102: testing Output files: -dv -g --xml -y ... - -./output.at:92: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 ./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y -./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./output.at:87: grep '#include "./foo.h"' y.tab.c -./output.at:84: grep '#include "y.tab.h"' y.tab.c -stderr: -85. output.at:81: ok -stdout: -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none + + stdout: #include "./foo.h" -input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] -94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... -93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... 
-86. output.at:84: 95. output.at:110: testing Output files: -dv -b bar ... -./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y -./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y - ok 87. output.at:87: ok -./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y -stderr: - -input.y:2.8-10: error: duplicate directive [-Werror=other] - 2 | %start exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:1124: sed 's,.*/$,,' stderr 1>&2 -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -stderr: -test.y:4.9: error: stray '$' [-Werror=other] -test.y:5.9: error: stray '@' [-Werror=other] -88. output.at:92: ok -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none +73. named-refs.at:297: ok +93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... +94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1877: sed 's,.*/$,,' stderr 1>&2 -./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y +./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y +./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none +95. output.at:110: testing Output files: -dv -b bar ... 96. output.at:112: testing Output files: -dv -g -o foo.c ... 
- -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y -97. output.at:116: testing Output files: %header %verbose ... -./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y stderr: +./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y :6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] -98. output.at:118: testing Output files: %header %verbose %yacc ... -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error ./output.at:100: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -99. output.at:121: testing Output files: %header %verbose %yacc ... -./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y +stderr: +stdout: +./named-refs.at:185: $PREPARSER ./test input.txt +stderr: +./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +91. output.at:100: ok stderr: stderr: -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none ./output.at:102: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +71. 
named-refs.at:22: ok + +stderr: +./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y ./output.at:107: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 stderr: -91. output.at:100: ./output.at:110: find . -type f | + +92. output.at:102: ok +stderr: +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none +./output.at:110: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - ok -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -92. output.at:102: ok -stderr: foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none ./output.at:104: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -95. output.at:110: ok -94. output.at:107: -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none - ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y - +97. output.at:116: testing Output files: %header %verbose ... +./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +98. output.at:118: testing Output files: %header %verbose %yacc ... +94. output.at:107: ok 93. output.at:104: ok -stderr: +95. output.at:110: ok -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none stderr: -100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... +./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y ./output.at:112: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y + + + +99. output.at:121: testing Output files: %header %verbose %yacc ... +./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +96. output.at:112: ok +100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... +./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y +101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... ./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... +./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y + +stderr: +103. output.at:136: testing Output files: %header %verbose ... +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y ./output.at:116: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... -102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... -96. output.at:112: ok - -./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -49. input.at:2224: ok -97. output.at:116: ok - +82. named-refs.at:715: ok +./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: +97. output.at:116: ok ./output.at:118: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -54. 
input.at:2429: ok -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none + stderr: -./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +stderr: +./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y +98. output.at:118: ok + ./output.at:121: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -103. output.at:136: testing Output files: %header %verbose ... - - -./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -98. output.at:118: ok -104. output.at:139: testing Output files: %header %verbose -o foo.c ... -99. output.at:121: stderr: - ok - -105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... -stdout: -./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -73. named-refs.at:297: ok -./named-refs.at:185: $PREPARSER ./test input.txt -stderr: -./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y -./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... -./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -stderr: -stderr: -./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy ./output.at:125: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -71. named-refs.at:22: ok -108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... - - -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none stderr: -109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... 
-./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy -./output.at:127: find . -type f | +./output.at:129: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -100. output.at:125: ok -./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none +104. output.at:139: testing Output files: %header %verbose -o foo.c ... +99. output.at:121: ok +./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy +100. output.at:125: 105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... + ok +102. output.at:129: stderr: -stderr: -./output.at:129: find . -type f | + ok +./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy +./output.at:127: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + + +101. output.at:127: ok +106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... + +./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy stderr: -48. input.at:2191: ok -./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -111. output.at:173: testing Output files: %defines -o foo.c++ ... -./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +63. input.at:2883: 109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... + ok ./output.at:136: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -101. output.at:127: ok +107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... 
+./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy +./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy 103. output.at:136: ok -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... +110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... +./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y -102. output.at:129: ok -./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -stderr: -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +111. output.at:173: testing Output files: %defines -o foo.c++ ... stderr: -113. output.at:191: testing Output files: lalr1.cc ... -./output.at:142: find . -type f | +./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +./output.at:139: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:146: find . -type f | +stderr: +./output.at:142: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror +104. output.at:139: 112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... + ok +./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy stderr: - -./output.at:150: find . -type f | +105. output.at:142: stderr: + ok +stderr: +./output.at:154: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -24. input.at:1074: ok - -stderr: -115. output.at:197: testing Output files: lalr1.cc %header %verbose ... -105. output.at:142: ./output.at:139: find . -type f | +./output.at:146: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - ok -114. output.at:194: testing Output files: lalr1.cc %verbose ... -./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -107. output.at:150: ok -./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -106. output.at:146: stderr: - ok -./output.at:154: find . -type f | + +./output.at:150: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... - -stderr: -104. output.at:139: +106. output.at:146: stderr: ok +108. output.at:154: ok foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other] foo.y: error: 1 nonterminal useless in grammar [-Werror=other] foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other] -./output.at:160: find . -type f | +107. output.at:150: ./output.at:160: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + ok + +113. output.at:191: testing Output files: lalr1.cc ... +109. output.at:160: ok + +./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +59. input.at:2719: + ok + +stderr: stderr: -./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy ./output.at:173: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -40. input.at:1826: ok -82. named-refs.at:715: ok - -109. output.at:160: ok -stderr: -111. output.at:173: foo.y:1.1-15: error: %define variable 'useless' is not used - ok +foo.y:1.1-15: error: %define variable 'useless' is not used ./output.at:167: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -118. output.at:206: testing Output files: lalr1.cc %header %verbose ... -117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... -./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -108. output.at:154: ok -119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... +114. output.at:194: testing Output files: lalr1.cc %verbose ... +./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +115. output.at:197: testing Output files: lalr1.cc %header %verbose ... +116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... -120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... -./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy -121. 
output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... -stderr: 110. output.at:167: ok - -input.y:2.15: error: stray '$' [-Werror=other] - - -122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... +117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... +111. output.at:173: ok +./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: -./input.at:2733: sed 's,.*/$,,' stderr 1>&2 ./output.at:176: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -stderr: +119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... +112. output.at:176: ok +118. output.at:206: testing Output files: lalr1.cc %header %verbose ... +./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy +120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... +stderr: ./output.at:191: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... + +113. output.at:191: ok +stderr: +stderr: ./output.at:197: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... -126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error -stderr: -125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y -./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 -112. output.at:176: ok -./output.at:194: find . -type f | +122. 
output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... +./output.at:200: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + stderr: -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -115. output.at:197: ok -128. output.at:282: testing Conflicting output files: -o foo.y ... -./output.at:200: find . -type f | +./output.at:194: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -113. output.at:191: ./output.at:237: rm -f foo.yy.bak - ok -127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y -./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -114. output.at:194: ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy +115. output.at:197: ok 116. output.at:200: ok - - -./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy -./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y - +./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy stderr: +123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... +114. output.at:194: ok ./output.at:203: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: +./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + +stderr: +./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy ./output.at:206: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -130. output.at:335: testing Output file name: ( ... -./output.at:335: touch "(.tmp" || exit 77 -129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... -./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 +117. output.at:203: ok + +124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... +125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... +./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 +./output.at:206: grep 'include .subdir/' foo.tab.cc +./output.at:206: grep 'include .subdir/' foo.tab.hh +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y +./output.at:237: rm -f foo.yy.bak +./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + +118. output.at:206: ok +126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y stderr: stderr: -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y -./output.at:206: grep 'include .subdir/' foo.tab.cc -./output.at:226: find . -type f | +stderr: +127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... +./output.at:215: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y -58. input.at:2675: ok -131. output.at:336: testing Output file name: ) ... -./output.at:336: touch ").tmp" || exit 77 -./output.at:231: find . -type f | +./output.at:210: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:226: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:206: grep 'include .subdir/' foo.tab.hh -132. output.at:337: testing Output file name: # ... -./output.at:337: touch "#.tmp" || exit 77 -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y -122. output.at:226: ok -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y -123. 
output.at:231: ./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror -./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y +./output.at:210: grep 'include .subdir/' subdir/foo.cc +128. output.at:282: testing Conflicting output files: -o foo.y ... +120. output.at:215: ok +./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y +122. output.at:226: ./output.at:210: grep 'include .subdir/' subdir/foo.hh ok -133. output.at:338: testing Output file name: @@ ... -./output.at:338: touch "@@.tmp" || exit 77 -117. output.at:203: ok -118. output.at:206: ok -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y - - stderr: - -./output.at:237: find . -type f | +./output.at:231: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror -./output.at:282: cat foo.y +119. output.at:210: ok stderr: -124. output.at:237: ok -134. output.at:339: testing Output file name: @{ ... -./output.at:339: touch "@{.tmp" || exit 77 -./output.at:215: find . -type f | +./output.at:220: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -135. output.at:340: testing Output file name: @} ... -./output.at:340: touch "@}.tmp" || exit 77 -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y -136. output.at:341: testing Output file name: [ ... -./output.at:341: touch "[.tmp" || exit 77 + +123. output.at:231: ok + +121. output.at:220: ok +129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... 
+./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 + +130. output.at:335: testing Output file name: ( ... +./output.at:335: touch "(.tmp" || exit 77 +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror + +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +131. output.at:336: testing Output file name: ) ... +./output.at:336: touch ").tmp" || exit 77 stderr: -137. output.at:342: testing Output file name: ] ... -./output.at:342: touch "].tmp" || exit 77 -./output.at:220: find . -type f | +./output.at:237: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror +132. output.at:337: testing Output file name: # ... +./output.at:337: touch "#.tmp" || exit 77 +124. output.at:237: ok +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y +133. output.at:338: testing Output file name: @@ ... +./output.at:338: touch "@@.tmp" || exit 77 +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror +./output.at:282: cat foo.y +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y 128. output.at:282: ok -120. output.at:215: ok -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" + +134. output.at:339: testing Output file name: @{ ... +./output.at:339: touch "@{.tmp" || exit 77 +stderr: ./output.at:335: ls "(.c" "(.h" +stderr: +foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y +foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] +stdout: +135. output.at:340: testing Output file name: @} ... 
+./output.at:340: touch "@}.tmp" || exit 77 +(.c +(.h ./output.at:336: ls ").c" ").h" -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" +./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" +./output.at:267: sed 's,.*/$,,' stderr 1>&2 stdout: -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y `~!@#$%^&*()-=_+{}[]|\:;<>, .'.c `~!@#$%^&*()-=_+{}[]|\:;<>, .'.h stdout: -stdout: -(.c -(.h -stderr: +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y +./output.at:272: sed 's,.*/$,,' stderr 1>&2 ).c ).h ./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" +./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error +./output.at:337: ls "#.c" "#.h" +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error stderr: -./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" -./output.at:210: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 -121. output.at:220: ./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" - ok -138. output.at:363: testing Graph with no conflicts ... -foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] -./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - - -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none -stderr: +foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] +stdout: +#.c +#.h +./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" +./output.at:277: sed 's,.*/$,,' stderr 1>&2 ./output.at:338: ls "@@.c" "@@.h" -foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] -./output.at:337: ls "#.c" "#.h" +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error stdout: -./output.at:210: grep 'include .subdir/' subdir/foo.cc -139. output.at:403: testing Graph with unsolved S/R ... -./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y @@.c @@.h -141. output.at:538: testing Graph with R/R ... 
-stdout: -#.c -#.h ./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" -./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:272: sed 's,.*/$,,' stderr 1>&2 -./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" - -./output.at:267: sed 's,.*/$,,' stderr 1>&2 -stderr: -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y -./output.at:210: grep 'include .subdir/' subdir/foo.hh -140. output.at:473: testing Graph with solved S/R ... -foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] -./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error -119. output.at:210: ok +./output.at:339: ls "@{.c" "@{.h" +stdout: ./output.at:340: ls "@}.c" "@}.h" -./output.at:277: sed 's,.*/$,,' stderr 1>&2 -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error -142. output.at:576: testing Graph with reductions with multiple LAT ... +@{.c +@{.h +./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" stdout: @}.c @}.h -./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y ./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none 
+./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none +./output.at:267: cat foo.y +125. output.at:267: ok + +./output.at:272: cat foo.y +136. output.at:341: testing Output file name: [ ... +./output.at:341: touch "[.tmp" || exit 77 +126. output.at:272: ok +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y +./output.at:277: cat foo.y + +127. output.at:277: ok + +137. output.at:342: testing Output file name: ] ... +./output.at:342: touch "].tmp" || exit 77 +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y +138. output.at:363: testing Graph with no conflicts ... +./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y ./output.at:341: ls "[.c" "[.h" -./output.at:342: ls "].c" "].h" -stdout: stdout: -./output.at:339: ls "@{.c" "@{.h" [.c [.h -stdout: ./output.at:341: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" -@{.c -@{.h - -./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" +./output.at:342: ls "].c" "].h" +stdout: ].c ].h +./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" stderr: -stderr: -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] -./output.at:538: grep -v // input.gv ./output.at:363: grep -v // input.gv +138. output.at:363: ok + +139. output.at:403: testing Graph with unsolved S/R ... +./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y stderr: -./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother] ./output.at:403: grep -v // input.gv -141. output.at:538: 138. output.at:363: ok - ok -./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y -139. output.at:403: 63. input.at:2883: ok - ok -143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... -./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none +139. output.at:403: ok +140. output.at:473: testing Graph with solved S/R ... 
+./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y stderr: - -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none input.y:6.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:15.10-18: warning: rule useless in parser due to conflicts [-Wother] +./output.at:473: grep -v // input.gv +140. output.at:473: ok +141. output.at:538: testing Graph with R/R ... +./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +stderr: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] +./output.at:538: grep -v // input.gv +141. output.at:538: ok stderr: -./output.at:473: grep -v // input.gv -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: 
error: unused value: $1 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -145. diagnostics.at:84: testing Warnings ... +stdout: +142. output.at:576: testing Graph with reductions with multiple LAT ... 
+./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y +stderr: +stdout: +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y +stderr: +stderr: +stdout: +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y +stdout: +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y +./output.at:336: ls ").cc" ").hh" +stderr: +stdout: +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y +stdout: +).cc +).hh stderr: +stdout: +./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" stderr: input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:2.14-18: warning: rule useless in parser due to conflicts [-Wother] input.y:5.3: warning: rule useless in parser due to conflicts [-Wother] -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: 
error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: error: unused value: $1 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -144. output.at:744: testing C++ Output File Prefix Mapping ... 
+stderr: +stdout: ./output.at:576: grep -v // input.gv -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y +./output.at:338: ls "@@.cc" "@@.hh" +stdout: +@@.cc +@@.hh +142. output.at:576: ok +./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" +./output.at:339: ls "@{.cc" "@{.hh" +stdout: +@{.cc +@{.hh +./output.at:340: ls "@}.cc" "@}.hh" +./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" +stdout: + +@}.cc +@}.hh +./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" +stdout: +143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh +./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" +./output.at:335: ls "(.cc" "(.hh" +./output.at:337: ls "#.cc" "#.hh" +stdout: +stdout: +(.cc +(.hh +#.cc +#.hh +./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" +./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" +stderr: +stdout: +stderr: +stdout: +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y +./output.at:341: ls "[.cc" "[.hh" +stdout: +[.cc +[.hh +./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" +./output.at:342: ls "].cc" "].hh" +stdout: +].cc +].hh +./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" +stderr: +input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./output.at:641: grep -v // input.gv +143. output.at:641: ok + +144. output.at:744: testing C++ Output File Prefix Mapping ... ./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy -147. diagnostics.at:182: testing Line is too short, and then you die ... -./input.at:776: sed 's,.*/$,,' stderr 1>&2 +./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc +stderr: +stdout: +135. output.at:340: ok + +145. diagnostics.at:84: testing Warnings ... +stderr: +stderr: +stdout: +stdout: +./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh +129. 
output.at:328: ok +./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh + +./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy 146. diagnostics.at:133: testing Single point locations ... -142. output.at:576: ok -./input.at:786: sed 's,.*/$,,' stderr 1>&2 -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc +stderr: +stdout: +132. output.at:337: ok +stderr: +stdout: +131. output.at:336: ok -140. output.at:473: ok -./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none +147. diagnostics.at:182: testing Line is too short, and then you die ... 148. diagnostics.at:217: testing Zero-width characters ... +stderr: +stdout: +136. output.at:341: ok + 149. diagnostics.at:235: testing Tabulations and multibyte characters ... -./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none -./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y -./output.at:272: cat foo.y -./output.at:267: cat foo.y -125. output.at:267: 126. output.at:272: ok - ok +stderr: +stdout: +134. output.at:339: ok +150. diagnostics.at:282: testing Tabulations and multibyte characters ... +stderr: +stdout: +137. output.at:342: ok 151. diagnostics.at:303: testing Special files ... -150. diagnostics.at:282: testing Tabulations and multibyte characters ... -./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y -./output.at:277: cat foo.y -127. output.at:277: ok +stderr: +stdout: +130. output.at:335: ok +stderr: +stdout: +133. output.at:338: ok + 152. diagnostics.at:328: testing Complaints from M4 ... 
-./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +153. diagnostics.at:351: testing Carriage return ... +stderr: +stdout: +./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS +./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 ./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -59. input.at:2719: ok ./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? 
chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y - -stderr: -input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./diagnostics.at:182: "$PERL" -pi -e ' +./diagnostics.at:217: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5003,8 +5157,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:641: grep -v // input.gv -./diagnostics.at:217: "$PERL" -pi -e ' +./diagnostics.at:351: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5013,18 +5166,7 @@ } ' experr || exit 77 ./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -143. output.at:641: ok -153. diagnostics.at:351: testing Carriage return ... -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -148. diagnostics.at:217: ok -147. diagnostics.at:182: ok - - -./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 - -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:282: "$PERL" -pi -e ' +./diagnostics.at:182: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5032,7 +5174,13 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:303: "$PERL" -pi -e ' +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +148. diagnostics.at:217: ok +147. diagnostics.at:182: ok +153. 
diagnostics.at:351: ok + +./diagnostics.at:282: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5040,7 +5188,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:84: "$PERL" -pi -e ' +./diagnostics.at:133: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5048,8 +5196,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 + + ./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:351: "$PERL" -pi -e ' +./diagnostics.at:303: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5057,7 +5207,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:133: "$PERL" -pi -e ' +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:328: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5065,12 +5216,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -155. diagnostics.at:399: testing Screen width: 200 columns ... -154. diagnostics.at:372: testing CR NL ... -./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:235: "$PERL" -pi -e ' +./diagnostics.at:84: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5078,7 +5224,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:328: "$PERL" -pi -e ' +154. diagnostics.at:372: testing CR NL ... +./diagnostics.at:235: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5086,29 +5233,21 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +155. diagnostics.at:399: testing Screen width: 200 columns ... +./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y 156. diagnostics.at:432: testing Screen width: 80 columns ... 
-./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -153. diagnostics.at:351: ok - ./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y +146. diagnostics.at:133: ok 150. diagnostics.at:282: ok -157. diagnostics.at:465: testing Screen width: 60 columns ... 151. diagnostics.at:303: ok -145. diagnostics.at:84: 146. diagnostics.at:133: ok - ok -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y - -149. diagnostics.at:235: ok - - - -./diagnostics.at:372: "$PERL" -pi -e ' +./diagnostics.at:399: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5116,11 +5255,14 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 + +152. diagnostics.at:328: ok +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y +145. diagnostics.at:84: ok + +157. diagnostics.at:465: testing Screen width: 60 columns ... +149. diagnostics.at:235: ok 158. diagnostics.at:504: testing Suggestions ... -159. diagnostics.at:527: testing Counterexamples ... -152. 
diagnostics.at:328: ./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none - ok -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:432: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5129,15 +5271,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -160. diagnostics.at:645: testing Deep Counterexamples ... -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -162. skeletons.at:25: testing Relative skeleton file names ... -./skeletons.at:27: mkdir tmp -161. diagnostics.at:713: testing Indentation with message suppression ... -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y -./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y -./diagnostics.at:399: "$PERL" -pi -e ' + + +./diagnostics.at:372: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5145,8 +5281,16 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y +159. diagnostics.at:527: testing Counterexamples ... +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y +160. diagnostics.at:645: testing Deep Counterexamples ... +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +161. diagnostics.at:713: testing Indentation with message suppression ... 
+./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y +./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:504: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5155,7 +5299,11 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:465: "$PERL" -pi -e ' +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +162. skeletons.at:25: testing Relative skeleton file names ... +./skeletons.at:27: mkdir tmp +./diagnostics.at:645: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5163,15 +5311,18 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y -./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y -158. diagnostics.at:504: ok -163. skeletons.at:85: testing Installed skeleton file names ... -./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y +155. diagnostics.at:399: 158. diagnostics.at:504: ok + ok +./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y + + +160. diagnostics.at:645: ok ./skeletons.at:64: cat input-gram.tab.c -./diagnostics.at:645: "$PERL" -pi -e ' +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror +156. 
diagnostics.at:432: ok +./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y +./diagnostics.at:465: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5179,15 +5330,11 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +163. skeletons.at:85: testing Installed skeleton file names ... +164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./skeletons.at:69: cat input-gram.tab.c -154. diagnostics.at:372: ok -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror -./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y -156. diagnostics.at:432: ok +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y +./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y ./diagnostics.at:527: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5196,348 +5343,72 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -160. diagnostics.at:645: ok -164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... ./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -157. diagnostics.at:465: ok - - -./skeletons.at:74: cat input-cmd-line.tab.c -155. diagnostics.at:399: ok -./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS +154. diagnostics.at:372: ok ./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +165. skeletons.at:166: testing Complaining during macro argument expansion ... +./skeletons.at:69: cat input-gram.tab.c +./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y 166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... 
+./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y 167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... ./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77 -./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y 164. skeletons.at:142: ok - -stderr: -./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -stdout: -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y -170. sets.at:153: testing Firsts ... -./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -15. input.at:774: ok -166. skeletons.at:248: ok -stderr: -stdout: -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y - -stdout: - -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh -./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" -172. sets.at:269: testing Build relations ... -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -171. sets.at:228: testing Accept ... -./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -stderr: -stderr: -stdout: -stdout: -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y -16. input.at:784: ok -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y -stderr: -bison (GNU Bison) 3.8.2 -RITEM - 0: exp $end (rule 0) - 3: exp '<' exp (rule 1) - 7: exp '>' exp (rule 2) - 11: exp '+' exp (rule 3) - 15: exp '-' exp (rule 4) - 19: exp '^' exp (rule 5) - 23: exp '=' exp (rule 6) - 27: "exp" (rule 7) - - -DERIVES - $accept derives - 0 exp $end - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -NULLABLE - $accept: no - exp: no - - -RTC: Firsts Input BEGIN - - 01 - .--. - 0| 1| - 1| 1| - `--' -RTC: Firsts Input END - -RTC: Firsts Output BEGIN - - 01 - .--. 
- 0|11| - 1| 1| - `--' -RTC: Firsts Output END - -FIRSTS - $accept firsts - $accept - exp - exp firsts - exp - - -FDERIVES - $accept derives - 0 exp $end - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -relation_transpose: - 0: 1 2 3 4 5 6 - 1: 1 2 3 4 5 6 - 2: 1 2 3 4 5 6 - 3: 1 2 3 4 5 6 - 4: 1 2 3 4 5 6 - 5: 1 2 3 4 5 6 - 6: 1 2 3 4 5 6 - -relation_transpose: output: - 1: 0 1 2 3 4 5 6 - 2: 0 1 2 3 4 5 6 - 3: 0 1 2 3 4 5 6 - 4: 0 1 2 3 4 5 6 - 5: 0 1 2 3 4 5 6 - 6: 0 1 2 3 4 5 6 - -follows after includes: - FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' - -Lookaheads: - State 1: - rule 7: - State 3: - rule 0: - State 10: - rule 1: $end '<' '>' '+' '-' '^' '=' - State 11: - rule 2: $end '<' '>' '+' '-' '^' '=' - State 12: - rule 3: $end '<' '>' '+' '-' '^' '=' - State 13: - rule 4: $end '<' '>' '+' '-' '^' '=' - State 14: - rule 5: $end '<' '>' '+' '-' '^' '=' - State 15: - rule 6: $end '<' '>' '+' '-' '^' '=' - -./sets.at:172: sed -f extract.sed stderr -./output.at:336: ls ").cc" ").hh" -170. sets.at:153: ok - -stdout: -).cc -).hh -./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" - -./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -173. sets.at:315: testing Reduced Grammar ... -stdout: -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -final state 6 -./sets.at:248: sed -n ' - /^State \(.*\)/{ - s//final state \1/ - x - } - / accept/{ - x - p - q - } - ' input.output -174. sets.at:394: testing Reduced Grammar with prec and assoc ... -./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -171. 
sets.at:228: ok stderr: -stderr: -stdout: -stdout: - -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y -./output.at:338: ls "@@.cc" "@@.hh" -stdout: -@@.cc -@@.hh -./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" +input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] + 2 | %pure-parser + | ^~~~~~~~~~~~ + | %define api.pure +input.y:3.1-14: error: deprecated directive: '%error-verbose', use '%define parse.error verbose' [-Werror=deprecated] + 3 | %error-verbose + | ^~~~~~~~~~~~~~ + | %define parse.error verbose ./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./output.at:335: ls "(.cc" "(.hh" -175. reduce.at:26: testing Useless Terminals ... -./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -stderr: -stdout: -stdout: -(.cc -(.hh -stderr: -./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" -input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:2.14-17: error: rule useless in parser due to conflicts [-Werror=other] - 2 | expr: term | term | term | term | term | term - | ^~~~ -input.y:2.21-24: error: rule useless in parser due to conflicts [-Werror=other] - 2 | expr: term | term | term | term | term | term - | ^~~~ -input.y:2.28-31: error: rule useless in parser due to conflicts [-Werror=other] - 2 | expr: term | term | term | term | term | term - | ^~~~ -input.y:2.35-38: error: rule useless in parser due to conflicts [-Werror=other] - 2 | expr: term | term | term | term | term | term - | ^~~~ -input.y:2.42-45: error: rule useless in parser due to conflicts [-Werror=other] - 2 | expr: term | term | term | term | term | term - | ^~~~ -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y -./sets.at:286: sed 's,.*/$,,' stderr 1>&2 -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror -167. skeletons.at:302: ok -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -174. sets.at:394: ok -./output.at:337: ls "#.cc" "#.hh" - -stdout: -./output.at:341: ls "[.cc" "[.hh" -#.cc -#.hh +./skeletons.at:74: cat input-cmd-line.tab.c -./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" -stdout: -[.cc -[.hh -./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" +162. 
skeletons.at:25: ok +./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS +./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 +./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error +./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./output.at:339: ls "@{.cc" "@{.hh" +157. diagnostics.at:465: ok +159. diagnostics.at:527: ok +168. sets.at:27: testing Nullable ... +./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +167. skeletons.at:302: ok +stderr: stdout: -176. reduce.at:70: testing Useless Nonterminals ... -@{.cc -@{.hh -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" 169. sets.at:111: testing Broken Closure ... ./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -177. reduce.at:120: testing Useless Rules ... -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output -stderr: -175. reduce.at:26: bison (GNU Bison) 3.8.2 -input.y: error: 1 nonterminal useless in grammar [-Werror=other] -input.y: error: 1 rule useless in grammar [-Werror=other] -input.y:4.1-7: error: nonterminal useless in grammar: useless [-Werror=other] -Reduced Grammar - -ntokens = 7, nnterms = 4, nsyms = 11, nrules = 6, nritems = 17 - -Tokens ------- - -Value Sprec Sassoc Tag - 0 0 0 $end - 1 0 0 error - 2 0 0 $undefined - 3 0 0 "+" - 4 0 0 "*" - 5 0 0 "useless" - 6 0 0 "num" - - -Nonterminals ------------- - -Value Tag - 7 $accept - 8 expr - 9 term - 10 fact - - -Rules ------ - -Num (Prec, Assoc, Useful, UselessChain) Lhs -> (Ritem Range) Rhs - 0 ( 0, 0, t, f) 7 -> ( 0- 1) 8 0 - 1 ( 0, 0, t, f) 8 -> ( 3- 5) 8 3 9 - 2 ( 0, 0, t, t) 8 -> ( 7- 7) 9 - 3 ( 0, 0, t, f) 9 -> ( 9-11) 9 4 10 - 4 ( 0, 0, t, t) 9 -> (13-13) 10 - 5 ( 0, 0, t, t) 10 -> (17-17) 6 - 6 ( 0, 0, f, t) 11 -> (15-15) 5 - - -Rules interpreted ------------------ - -0 $accept: expr $end -1 expr: expr "+" term -2 expr: term -3 term: term "*" fact -4 term: fact -5 fact: "num" -6 useless: "useless" +./output.at:836: $PREPARSER ./parser -reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions. 
- ok -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./sets.at:325: sed 's,.*/$,,' stderr 1>&2 stderr: -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error -stdout: -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y -165. skeletons.at:166: testing Complaining during macro argument expansion ... -./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y +./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +166. skeletons.at:248: ok +144. output.at:744: ok ./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y +170. sets.at:153: testing Firsts ... +./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +171. sets.at:228: testing Accept ... +./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +172. sets.at:269: testing Build relations ... + +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y + +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none ./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y -168. sets.at:27: testing Nullable ... -./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +174. sets.at:394: testing Reduced Grammar with prec and assoc ... +173. sets.at:315: testing Reduced Grammar ... +./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y stderr: +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y bison (GNU Bison) 3.8.2 RITEM 0: e $end (rule 0) @@ -5611,10 +5482,6 @@ ./sets.at:43: sed -f extract.sed stderr 168. sets.at:27: ok -165. skeletons.at:166: ok -178. 
reduce.at:224: testing Useless Parts ... -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y stderr: bison (GNU Bison) 3.8.2 RITEM @@ -5856,79 +5723,279 @@ State 10: rule 0: - ./sets.at:127: sed -n 's/[ ]*$//;/^RTC: Firsts Output BEGIN/,/^RTC: Firsts Output END/p' stderr + 169. sets.at:111: ok -179. reduce.at:312: testing Reduced Automaton ... -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y +165. skeletons.at:166: ok +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -stderr: + +175. reduce.at:26: testing Useless Terminals ... +./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror stdout: -./skeletons.at:122: $PREPARSER ./input-cmd-line +final state 6 +176. reduce.at:70: testing Useless Nonterminals ... stderr: +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +177. reduce.at:120: testing Useless Rules ... 
+./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +bison (GNU Bison) 3.8.2 +RITEM + 0: exp $end (rule 0) + 3: exp '<' exp (rule 1) + 7: exp '>' exp (rule 2) + 11: exp '+' exp (rule 3) + 15: exp '-' exp (rule 4) + 19: exp '^' exp (rule 5) + 23: exp '=' exp (rule 6) + 27: "exp" (rule 7) -syntax error, unexpected 'a', expecting end of file -./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./output.at:342: ls "].cc" "].hh" + +DERIVES + $accept derives + 0 exp $end + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +NULLABLE + $accept: no + exp: no + + +RTC: Firsts Input BEGIN + + 01 + .--. + 0| 1| + 1| 1| + `--' +RTC: Firsts Input END + +RTC: Firsts Output BEGIN + + 01 + .--. + 0|11| + 1| 1| + `--' +RTC: Firsts Output END + +FIRSTS + $accept firsts + $accept + exp + exp firsts + exp + + +FDERIVES + $accept derives + 0 exp $end + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +relation_transpose: + 0: 1 2 3 4 5 6 + 1: 1 2 3 4 5 6 + 2: 1 2 3 4 5 6 + 3: 1 2 3 4 5 6 + 4: 1 2 3 4 5 6 + 5: 1 2 3 4 5 6 + 6: 1 2 3 4 5 6 + +relation_transpose: output: + 1: 0 1 2 3 4 5 6 + 2: 0 1 2 3 4 5 6 + 3: 0 1 2 3 4 5 6 + 4: 0 1 2 3 4 5 6 + 5: 0 1 2 3 4 5 6 + 6: 0 1 2 3 4 5 6 + +follows after includes: + FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' + +Lookaheads: + State 1: + rule 7: + State 3: + rule 0: + State 10: + rule 1: $end '<' '>' '+' '-' '^' '=' + State 11: + rule 2: $end '<' '>' '+' '-' '^' '=' + State 12: + rule 3: $end '<' '>' '+' '-' '^' '=' + State 13: + rule 4: $end '<' '>' '+' '-' '^' '=' + State 14: + rule 5: $end '<' '>' '+' '-' '^' '=' + State 15: + rule 6: $end '<' '>' '+' '-' '^' '=' + +./sets.at:248: sed -n ' + /^State \(.*\)/{ + s//final state \1/ + x + } + / accept/{ + x + p + q + } + ' input.output +174. sets.at:394: ok +./sets.at:172: sed -f extract.sed stderr +171. sets.at:228: 170. sets.at:153: ok + ok + + + +stderr: +178. reduce.at:224: testing Useless Parts ... +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y +179. 
reduce.at:312: testing Reduced Automaton ... +stderr: +input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:2.14-17: error: rule useless in parser due to conflicts [-Werror=other] + 2 | expr: term | term | term | term | term | term + | ^~~~ +input.y:2.21-24: error: rule useless in parser due to conflicts [-Werror=other] + 2 | expr: term | term | term | term | term | term + | ^~~~ +input.y:2.28-31: error: rule useless in parser due to conflicts [-Werror=other] + 2 | expr: term | term | term | term | term | term + | ^~~~ +input.y:2.35-38: error: rule useless in parser due to conflicts [-Werror=other] + 2 | expr: term | term | term | term | term | term + | ^~~~ +input.y:2.42-45: error: rule useless in parser due to conflicts [-Werror=other] + 2 | expr: term | term | term | term | term | term + | ^~~~ +bison (GNU Bison) 3.8.2 +input.y: error: 1 nonterminal useless in grammar [-Werror=other] +input.y: error: 1 rule useless in grammar [-Werror=other] +input.y:4.1-7: error: nonterminal useless in grammar: useless [-Werror=other] +Reduced Grammar + +ntokens = 7, nnterms = 4, nsyms = 11, nrules = 6, nritems = 17 + +Tokens +------ + +Value Sprec Sassoc Tag + 0 0 0 $end + 1 0 0 error + 2 0 0 $undefined + 3 0 0 "+" + 4 0 0 "*" + 5 0 0 "useless" + 6 0 0 "num" + + +Nonterminals +------------ + +Value Tag + 7 $accept + 8 expr + 9 term + 10 fact + + +Rules +----- + +Num (Prec, Assoc, Useful, UselessChain) Lhs -> (Ritem Range) Rhs + 0 ( 0, 0, t, f) 7 -> ( 0- 1) 8 0 + 1 ( 0, 0, t, f) 8 -> ( 3- 5) 8 3 9 + 2 ( 0, 0, t, t) 8 -> ( 7- 7) 9 + 3 ( 0, 0, t, f) 9 -> ( 9-11) 9 4 10 + 4 ( 0, 0, t, t) 9 -> (13-13) 10 + 5 ( 0, 0, t, t) 10 -> (17-17) 6 + 6 ( 0, 0, f, t) 11 -> (15-15) 5 + + +Rules interpreted +----------------- + +0 $accept: expr $end +1 expr: expr "+" term +2 expr: term +3 term: term "*" fact +4 term: fact +5 fact: "num" +6 useless: "useless" + + +reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions. +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y 180. reduce.at:406: testing Underivable Rules ... ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -stdout: +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./sets.at:325: sed 's,.*/$,,' stderr 1>&2 +./sets.at:286: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +161. diagnostics.at:713: ok +175. 
reduce.at:26: ok +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + + +181. reduce.at:452: testing Bad start symbols ... ./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror +182. reduce.at:550: testing no lr.type: Single State Split ... +./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror +./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: input.y: error: 3 nonterminals useless in grammar [-Werror=other] input.y: error: 3 rules useless in grammar [-Werror=other] input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other] input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other] -181. reduce.at:452: testing Bad start symbols ... 
-].cc -].hh -./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 -./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" -./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror -162. skeletons.at:25: ok - -stderr: -input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] - 2 | %pure-parser - | ^~~~~~~~~~~~ - | %define api.pure -input.y:3.1-14: error: deprecated directive: '%error-verbose', use '%define parse.error verbose' [-Werror=deprecated] - 3 | %error-verbose - | ^~~~~~~~~~~~~~ - | %define parse.error verbose -182. reduce.at:550: testing no lr.type: Single State Split ... 
-./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS -./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: -not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] -not-reduced.y: error: 3 rules useless in grammar [-Werror=other] -not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] - 14 | not_reachable: useful { /* A not reachable action. */ } - | ^~~~~~~~~~~~~ -not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] - 17 | non_productive: non_productive useless_token - | ^~~~~~~~~~~~~~ -not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] - 11 | | non_productive { /* A non productive action. */ } - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -159. diagnostics.at:527: ok -./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error input.y: error: 9 nonterminals useless in grammar [-Werror=other] input.y: error: 9 rules useless in grammar [-Werror=other] input.y:10.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] @@ -5958,25 
+6025,6 @@ input.y:18.1-8: error: nonterminal useless in grammar: useless9 [-Werror=other] 18 | useless9: '9'; | ^~~~~~~~ -172. sets.at:269: ok -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output -./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none -input.y: error: 1 nonterminal useless in grammar [-Werror=other] -input.y: error: 1 rule useless in grammar [-Werror=other] -input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] - 18 | unused - | ^~~~~~ - -183. reduce.at:550: testing lr.type=lalr: Single State Split ... -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 -173. sets.at:315: ok -./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none stderr: input.y: error: 2 nonterminals useless in grammar [-Werror=other] input.y: error: 3 rules useless in grammar [-Werror=other] @@ -5989,310 +6037,371 @@ input.y:5.15-25: error: rule useless in grammar [-Werror=other] 5 | exp: useful | underivable; | ^~~~~~~~~~~ +stderr: +not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] +not-reduced.y: error: 3 rules useless in grammar [-Werror=other] +not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] + 14 | not_reachable: useful { /* A not reachable action. */ } + | ^~~~~~~~~~~~~ +not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] + 17 | non_productive: non_productive useless_token + | ^~~~~~~~~~~~~~ +not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] + 11 | | non_productive { /* A non productive action. */ } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +181. 
reduce.at:452: ok +./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 ./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none +stderr: +input.y: error: 1 nonterminal useless in grammar [-Werror=other] +input.y: error: 1 rule useless in grammar [-Werror=other] +input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] + 18 | unused + | ^~~~~~ ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -161. 
diagnostics.at:713: ok -./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output + +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error +stderr: +stdout: +./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./skeletons.at:122: $PREPARSER ./input-cmd-line +stderr: +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +syntax error, unexpected 'a', expecting end of file +./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error +183. reduce.at:550: testing lr.type=lalr: Single State Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none +172. 
sets.at:269: ok ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none - -./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y -./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c -179. reduce.at:312: ok -181. reduce.at:452: ok +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +184. reduce.at:550: testing lr.type=ielr: Single State Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +173. 
sets.at:315: ok +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c 185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y - +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output ./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none -184. reduce.at:550: testing lr.type=ielr: Single State Split ... -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -187. reduce.at:783: testing lr.type=lalr: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y +180. reduce.at:406: ok +./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output +./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c + ./reduce.at:550: sed -n '/^State 0$/,$p' input.output ./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed -n '/^State 0$/,$p' input.output 186. reduce.at:783: testing no lr.type: Lane Split ... 
./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +stdout: +./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:550: $PREPARSER ./input +stderr: +syntax error +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c +./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +182. reduce.at:550: ok +179. reduce.at:312: ok + +stderr: +stdout: +176. reduce.at:70: ok +./reduce.at:783: sed -n '/^State 0$/,$p' input.output +187. reduce.at:783: testing lr.type=lalr: Lane Split ... 188. reduce.at:783: testing lr.type=ielr: Lane Split ... ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + stderr: stdout: -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y -180. reduce.at:406: ./output.at:340: ls "@}.cc" "@}.hh" - ok -stdout: -@}.cc -@}.hh -./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" - +./reduce.at:550: $PREPARSER ./input 189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed -n '/^State 0$/,$p' input.output +stderr: +syntax error +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +178. reduce.at:224: ok +183. reduce.at:550: ok ./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -177. reduce.at:120: ok -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none + 190. reduce.at:1027: testing no lr.type: Complex Lane Split ... +./reduce.at:783: sed -n '/^State 0$/,$p' input.output ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: +191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... 
+./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -stdout: +stderr: ./reduce.at:550: $PREPARSER ./input +stdout: +177. reduce.at:120: ok stderr: -syntax error ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -176. reduce.at:70: ok -183. reduce.at:550: ok - +stderr: +184. reduce.at:550: stdout: + ok +./skeletons.at:128: $PREPARSER ./input-gram -191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: -stdout: +syntax error, unexpected 'a', expecting end of file +./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +163. skeletons.at:85: ok ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output 192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... -./reduce.at:550: $PREPARSER ./input ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: + +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: stdout: -syntax error -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:550: $PREPARSER ./input +194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... stderr: -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output -./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +185. reduce.at:550: ok + ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -184. reduce.at:550: ok -stderr: -stdout: -182. reduce.at:550: ok -./reduce.at:783: $PREPARSER ./input stderr: stdout: -stderr: ./reduce.at:783: $PREPARSER ./input -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - stderr: +syntax error ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -188. reduce.at:783: ok -194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... 
-189. reduce.at:783: ok - -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +186. reduce.at:783: ok +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror 196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: stdout: stderr: -178. reduce.at:224: ok +./reduce.at:783: $PREPARSER ./input +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: stdout: +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:783: $PREPARSER ./input stderr: syntax error +188. reduce.at:783: ok ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -186. reduce.at:783: ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror - ok -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +187. reduce.at:783: ok +stderr: +stdout: +stderr: +./reduce.at:783: $PREPARSER ./input +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stderr: +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 198. reduce.at:1627: testing no lr.default-reduction ... ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -stdout: -./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh -./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh -./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy +189. 
reduce.at:783: ok +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 + +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./reduce.at:1027: $PREPARSER ./input +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 stderr: +syntax error ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc -192. reduce.at:1027: ok -193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +199. reduce.at:1627: testing lr.default-reduction=most ... stderr: +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stdout: ./reduce.at:1027: $PREPARSER ./input stderr: -syntax error +190. reduce.at:1027: syntax error + ok ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output 191. reduce.at:1027: ok +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -199. reduce.at:1627: testing lr.default-reduction=most ... +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +200. reduce.at:1627: testing lr.default-reduction=consistent ... +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +201. reduce.at:1627: testing lr.default-reduction=accepting ... 
./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stdout: -./reduce.at:1296: $PREPARSER ./input +./reduce.at:1027: $PREPARSER ./input stderr: -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -197. reduce.at:1296: ok +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +192. reduce.at:1027: ok + stderr: stdout: -./skeletons.at:128: $PREPARSER ./input-gram +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1027: $PREPARSER ./input stderr: -syntax error, unexpected 'a', expecting end of file -./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -163. skeletons.at:85: ok -200. reduce.at:1627: testing lr.default-reduction=consistent ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y - - -201. reduce.at:1627: testing lr.default-reduction=accepting ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +193. reduce.at:1027: ok 202. report.at:37: testing Reports ... -202. report.at:37: skipped (report.at:75) ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +202. report.at:37: ./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + skipped (report.at:75) + -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output 203. report.at:3123: testing Reports with conflicts ... -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +204. conflicts.at:28: testing Token declaration order ... +./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +203. report.at:3123: skipped (report.at:3132) + +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... 
+./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./reduce.at:1627: $PREPARSER ./input +./reduce.at:1296: $PREPARSER ./input +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -199. reduce.at:1627: ok - -204. conflicts.at:28: testing Token declaration order ... -./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:1627: $PREPARSER ./input +196. reduce.at:1296: ok stderr: ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -200. reduce.at:1627: ok -203. report.at:3123: skipped (report.at:3132) - +stderr: +stdout: +./reduce.at:1296: $PREPARSER ./input +198. reduce.at:1627: ok +stderr: +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +197. reduce.at:1296: ok + 206. conflicts.at:183: testing Useless associativity warning ... ./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y -205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... -./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +stderr: +./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +207. conflicts.at:218: testing Useless precedence warning ... + +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y +205. conflicts.at:101: ok stderr: stdout: ./reduce.at:1627: $PREPARSER ./input stderr: ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -201. reduce.at:1627: ok +208. conflicts.at:275: testing S/R in initial ... +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +199. reduce.at:1627: ok stderr: -stdout: -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -207. conflicts.at:218: testing Useless precedence warning ... 
-./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y ./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror -stderr: stdout: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' + +./reduce.at:1627: $PREPARSER ./input +209. conflicts.at:301: testing %nonassoc and eof ... +./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] -input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] -input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] stderr: -205. conflicts.at:101: ok stdout: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./reduce.at:783: $PREPARSER ./input +./reduce.at:1627: $PREPARSER ./input +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +200. reduce.at:1627: ok +210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ... +./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +201. reduce.at:1627: ok ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror + +211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... +./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: +./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] +input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] +input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] +212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... 
+./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -187. reduce.at:783: stdout: -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error stdout: - ok -./reduce.at:1296: $PREPARSER ./input ./conflicts.at:84: $PREPARSER ./input stderr: -stderr: -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -196. reduce.at:1296: ok +./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 204. conflicts.at:28: ok - -208. conflicts.at:275: testing S/R in initial ... -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - - +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error stderr: stdout: +./reduce.at:1296: $PREPARSER ./input stderr: -209. conflicts.at:301: testing %nonassoc and eof ... -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ... -./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -129. 
output.at:328: ok -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +syntax error +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: input.y:7.1-9: error: useless precedence and associativity for U [-Werror=precedence] 7 | %nonassoc U @@ -6306,555 +6415,547 @@ input.y:2.1-11: error: useless precedence for Z [-Werror=precedence] 2 | %precedence Z | ^~~~~~~~~~~ -stdout: -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none -./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error - -./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: -stdout: -./reduce.at:1027: $PREPARSER ./input -stderr: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:1627: $PREPARSER ./input input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] -stderr: -stderr: -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... -136. output.at:341: ok -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +194. reduce.at:1296: ok +./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 -193. reduce.at:1027: ok -198. reduce.at:1627: ok -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error stdout: -./reduce.at:550: $PREPARSER ./input +./reduce.at:1296: $PREPARSER ./input +213. 
conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... +./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error stderr: - -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -stdout: -./reduce.at:1027: $PREPARSER ./input -stderr: -185. reduce.at:550: ok -stdout: -214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... -stderr: syntax error -213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... ./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +195. reduce.at:1296: ok +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none + +213. conflicts.at:523: ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none + skipped (conflicts.at:523) 215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... -./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y ./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -137. output.at:342: ok -190. 
reduce.at:1027: ok - -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none - - -./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -131. output.at:336: ok -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -206. conflicts.at:183: ./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - ok -218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... -./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... - -./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -213. conflicts.at:523: 133. output.at:338: ok -./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - skipped (conflicts.at:523) - -219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... -./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... -./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none +216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... 
+./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none - -221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... -./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... -./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none +./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +206. conflicts.at:183: ok ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -207. conflicts.at:218: ok +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... +217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... +./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stderr: -stdout: -stdout: +207. 
conflicts.at:218: ok +./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... +./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] 4 | e: 'e' | %empty; | ^~~~~~ +./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error stderr: stdout: -./reduce.at:1296: $PREPARSER ./input -./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:368: $PREPARSER ./input '0<0' stderr: -syntax error -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -194. reduce.at:1296: ok - -stderr: -stdout: -./conflicts.at:368: $PREPARSER ./input '0<0' -stderr: -224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... -./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:368: $PREPARSER ./input '0<0<0' -134. output.at:339: stderr: - ok stderr: syntax error, unexpected '<' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:1296: $PREPARSER ./input -stderr: +./conflicts.at:368: $PREPARSER ./input '0>0' stdout: stderr: ./conflicts.at:509: $PREPARSER ./input -syntax error -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error, unexpected end of file -./conflicts.at:368: $PREPARSER ./input '0>0' -./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -stdout: -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -195. reduce.at:1296: ok -210. 
conflicts.at:509: ok ./conflicts.at:368: $PREPARSER ./input '0>0>0' +./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none syntax error, unexpected '>' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - +210. conflicts.at:509: ok ./conflicts.at:368: $PREPARSER ./input '0<0>0' -225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: syntax error, unexpected '>' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -135. output.at:340: ok + ./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y -226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... -./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... -./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... +./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none - ./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -208. conflicts.at:275: ./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - ok -./conflicts.at:535: $PREPARSER ./input +./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -stderr: ./conflicts.at:530: $PREPARSER ./input - stderr: syntax error, unexpected end of file, expecting 'a' or 'b' ./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +208. conflicts.at:275: ok +214. conflicts.at:530: ok + + +stderr: +stdout: +220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... +./conflicts.at:535: $PREPARSER ./input +221. 
conflicts.at:622: testing parse.error=verbose and consistent errors: ... +./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: syntax error, unexpected end of file, expecting 'a' or 'b' ./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -132. output.at:337: ok -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror 215. conflicts.at:535: ok -229. conflicts.at:764: testing Unresolved SR Conflicts ... -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -214. conflicts.at:530: ok -stderr: +stderr: stdout: - ./conflicts.at:540: $PREPARSER ./input stderr: syntax error, unexpected end of file, expecting 'a' or 'b' ./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -230. conflicts.at:887: testing Resolved SR Conflicts ... -./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -231. conflicts.at:989: testing %precedence suffices ... -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror -./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 216. conflicts.at:540: ok -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error -232. conflicts.at:1015: testing %precedence does not suffice ... -./conflicts.at:901: cat input.output -stderr: -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... +./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... +./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -230. 
conflicts.at:887: ok stdout: +./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:546: $PREPARSER ./input -stdout: - -231. conflicts.at:989: stderr: -./conflicts.at:551: $PREPARSER ./input - ok +stderr: syntax error, unexpected end of file, expecting 'b' ./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +217. conflicts.at:546: ok + +224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... +./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./conflicts.at:551: $PREPARSER ./input stderr: syntax error, unexpected end of file, expecting 'b' ./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./conflicts.at:372: $PREPARSER ./input '0<0' +218. conflicts.at:551: ok +stderr: +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:372: $PREPARSER ./input '0<0<0' +stderr: +syntax error, unexpected '<', expecting end of file +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -217. conflicts.at:546: 218. conflicts.at:551: ok - ok -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none - - - +./conflicts.at:372: $PREPARSER ./input '0>0' stderr: -input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:372: $PREPARSER ./input '0>0>0' stderr: -233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +syntax error, unexpected '>', expecting end of file +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:372: $PREPARSER ./input '0<0>0' +stderr: +syntax error, unexpected '>', expecting end of file +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: +./conflicts.at:513: $PREPARSER ./input +stderr: +syntax error, unexpected end of file +./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +211. conflicts.at:513: ok + +226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... 
+./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: stdout: ./conflicts.at:622: $PREPARSER ./input -234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: syntax error, unexpected 'b' -./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS -236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... -237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error 221. conflicts.at:622: ok -stdout: -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... +./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: +./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: ./conflicts.at:632: $PREPARSER ./input stderr: syntax error, unexpected 'b' ./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror 223. 
conflicts.at:632: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror stderr: +stdout: + +./conflicts.at:381: $PREPARSER ./input '0<0' +stderr: +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: $PREPARSER ./input '0<0<0' +stderr: +syntax error, unexpected '<', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: $PREPARSER ./input '0>0' +228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +stderr: +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: $PREPARSER ./input '0>0>0' +stderr: +syntax error, unexpected '>', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:381: $PREPARSER ./input '0<0>0' +stderr: +syntax error, unexpected '>', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y +stderr: +stdout: +./conflicts.at:638: $PREPARSER ./input +stderr: +syntax error, unexpected end of file, expecting 'a' +./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +224. conflicts.at:638: ok +stderr: +stdout: +./conflicts.at:642: $PREPARSER ./input +stderr: +syntax error, unexpected end of file, expecting 'a' +./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror + +225. conflicts.at:642: ok + +./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +229. conflicts.at:764: testing Unresolved SR Conflicts ... +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +230. conflicts.at:887: testing Resolved SR Conflicts ... 
+./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +./conflicts.at:901: cat input.output +230. conflicts.at:887: ok + +231. conflicts.at:989: testing %precedence suffices ... +./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error +231. conflicts.at:989: ok + +232. conflicts.at:1015: testing %precedence does not suffice ... +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +./conflicts.at:647: $PREPARSER ./input +stderr: +syntax error, unexpected 'b' +stderr: +./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./conflicts.at:626: $PREPARSER ./input +stderr: +syntax error, unexpected 'b' +./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +226. conflicts.at:647: ok +222. 
conflicts.at:626: ok + + +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... +233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:780: cat input.output +229. conflicts.at:764: ok +stderr: +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other] -238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror ./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2 +stderr: ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -239. 
conflicts.at:1264: testing %expect not enough ... -./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y stderr: -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./conflicts.at:518: $PREPARSER ./input +stdout: +./conflicts.at:651: $PREPARSER ./input stderr: -239. conflicts.at:1264: ok -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror stderr: -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... +syntax error, unexpected end of file +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +syntax error, unexpected end of file +./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +212. conflicts.at:518: ok +227. conflicts.at:651: ok +stderr: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] + +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none -240. conflicts.at:1284: testing %expect right ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +236. 
conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stderr: +stdout: +./conflicts.at:388: $PREPARSER ./input '0<0' +stderr: +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: $PREPARSER ./input '0<0<0' stderr: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] stderr: +syntax error, unexpected '<', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:388: $PREPARSER ./input '0>0' +stderr: +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:388: $PREPARSER ./input '0>0>0' +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no 
-fno-caret -o input.c input.y -Wnone,none -Werror --trace=none stderr: -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] -stdout: -./conflicts.at:372: $PREPARSER ./input '0<0' +syntax error, unexpected '>', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: $PREPARSER ./input '0<0>0' stderr: -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -130. output.at:335: ok -./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:372: $PREPARSER ./input '0<0<0' +syntax error, unexpected '>', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +209. conflicts.at:301: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none + +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +232. conflicts.at:1015: ok ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -syntax error, unexpected '<', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -./conflicts.at:372: $PREPARSER ./input '0>0' +238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... 
+./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error + stderr: stdout: -./conflicts.at:638: $PREPARSER ./input -stderr: -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:732: $PREPARSER ./input stderr: -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:372: $PREPARSER ./input '0>0>0' +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +239. conflicts.at:1264: testing %expect not enough ... +./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -240. conflicts.at:1284: 224. conflicts.at:638: ok - ok +syntax error, unexpected 'a', expecting 'b' +./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +239. conflicts.at:1264: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:780: cat input.output -./conflicts.at:372: $PREPARSER ./input '0<0>0' +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +240. conflicts.at:1284: testing %expect right ... 
+./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -syntax error, unexpected '>', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -229. conflicts.at:764: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none - +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Werror +./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error +240. conflicts.at:1284: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 241. conflicts.at:1301: testing %expect too much ... 
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none ./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=error +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none 241. conflicts.at:1301: ok -242. conflicts.at:1321: testing %expect with reduce conflicts ... +./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +242. conflicts.at:1321: testing %expect with reduce conflicts ... ./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +242. conflicts.at:1321: ok +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none + 243. conflicts.at:1341: testing %expect in grammar rule not enough ... 
./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -243. conflicts.at:1341: - ok -242. conflicts.at:1321: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +243. conflicts.at:1341: ok +./conflicts.at:1145: cat input.output + +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=none -Werror --trace=none +238. conflicts.at:1127: ok 244. conflicts.at:1360: testing %expect in grammar rule right ... ./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none - 245. conflicts.at:1377: testing %expect in grammar rules ... ./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y +244. conflicts.at:1360: ok +./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: stdout: -./conflicts.at:651: $PREPARSER ./input -./conflicts.at:642: $PREPARSER ./input +./conflicts.at:1096: $PREPARSER ./input + stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +233. conflicts.at:1096: ok 246. conflicts.at:1396: testing %expect in grammar rule too much ... 
./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -syntax error, unexpected end of file, expecting 'a' -stderr: -./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of file -./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -232. conflicts.at:1015: ok + +245. conflicts.at:1377: ok +246. conflicts.at:1396: ok 247. conflicts.at:1415: testing %expect-rr in grammar rule ... ./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -246. conflicts.at:1396: ok -225. conflicts.at:642: ok -227. conflicts.at:651: ok -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none - - -244. conflicts.at:1360: ./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - ok -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -stderr: 248. conflicts.at:1440: testing %expect-rr too much in grammar rule ... +./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y 249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ... ./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y +248. conflicts.at:1440: ok +249. conflicts.at:1469: ok + +247. conflicts.at:1415: ok + 250. conflicts.at:1498: testing %prec with user string ... -./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y ./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -./conflicts.at:513: $PREPARSER ./input -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + 251. conflicts.at:1515: testing %no-default-prec without %prec ... ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y -stderr: -syntax error, unexpected end of file -245. 
conflicts.at:1377: ./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -stderr: -248. conflicts.at:1440: ok -stdout: 252. conflicts.at:1544: testing %no-default-prec with %prec ... ./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -211. conflicts.at:513: ok -249. conflicts.at:1469: ./conflicts.at:647: $PREPARSER ./input +stderr: +stdout: +250. conflicts.at:1498: ./conflicts.at:558: $PREPARSER ./input ok stderr: -syntax error, unexpected 'b' - -247. conflicts.at:1415: ok -./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1145: cat input.output -./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -226. conflicts.at:647: ok - +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +219. conflicts.at:558: ok +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -238. conflicts.at:1127: - ok +252. conflicts.at:1544: ok 253. conflicts.at:1568: testing %default-prec ... - ./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -250. conflicts.at:1498: ok -255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... -254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ... -./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y -256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ... -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y +254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ... ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -257. conflicts.at:2299: testing %expect-rr non GLR ... -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -252. conflicts.at:1544: ok -258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y - -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y - -259. counterexample.at:43: testing Unifying S/R ... -./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... 
+./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +stderr: +input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] +input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] +./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error 253. conflicts.at:1568: ok -261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... -./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -260. counterexample.at:83: testing Deep Unifying S/R ... -./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stdout: +./conflicts.at:564: $PREPARSER ./input +stderr: + stderr: input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror -stderr: -input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] -input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror - -255. conflicts.at:1855: ./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +255. conflicts.at:1855: 220. 
conflicts.at:564: ok ok -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror +256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ... +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B C - Shift derivation - s - `-> 2: y c - `-> 8: A . B `-> 4: C - Reduce derivation - s - `-> 1: a x - `-> 3: A . `-> 6: B C -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... -./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -stdout: -./conflicts.at:732: $PREPARSER ./input -stderr: -syntax error, unexpected 'a', expecting 'b' -./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... -./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y + +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +257. conflicts.at:2299: testing %expect-rr non GLR ... +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y +258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... 
+./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y stderr: input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] @@ -6866,90 +6967,23 @@ input.y:25.16: error: rule useless in parser due to conflicts [-Werror=other] input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other] input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other] -stderr: -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B - Shift derivation - s - `-> 2: A xby - `-> 9: . B - Reduce derivation - s - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 4: %empty . `-> 6: %empty -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - First example: A X . B Y $end - Shift derivation - $accept - `-> 0: s $end - `-> 2: A xby - `-> 10: X xby Y - `-> 9: . B - Second example: A X . B y $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 5: X x - `-> 4: %empty . -input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -stderr: +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none ./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2 -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B C - Shift derivation - s - `-> 1: ac - `-> 3: A ac C - `-> 4: b - `-> 5: . B - Reduce derivation - s - `-> 2: a bc - `-> 7: A . `-> 10: B C -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A A . B B C C - Shift derivation - s - `-> 1: ac - `-> 3: A ac C - `-> 3: A ac C - `-> 4: b - `-> 6: . b - `-> 5: B B - Reduce derivation - s - `-> 2: a bc - `-> 8: A a `-> 9: B bc C - `-> 7: A . 
`-> 10: B C -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror stderr: -./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 -./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stdout: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:626: $PREPARSER ./input +251. conflicts.at:1515: ok +./conflicts.at:743: $PREPARSER ./input stderr: -syntax error, unexpected 'b' -./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error -222. conflicts.at:626: stderr: - ok +syntax error, unexpected 'a', expecting 'b' or 'c' +./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y + +stderr: +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none bison (GNU Bison) 3.8.2 init: 0.000000 # state items: 26 @@ -7228,26 +7262,57 @@ `-> 13: %empty . 
+stderr: +1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error +stderr: +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error +259. counterexample.at:43: testing Unifying S/R ... +./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example: A . A B $end +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B C Shift derivation - $accept - `-> 0: s $end - `-> 1: t - `-> 4: y - `-> 6: A . A B - Second example: A . A $end + s + `-> 2: y c + `-> 8: A . B `-> 4: C Reduce derivation - $accept - `-> 0: s $end - `-> 2: s t - `-> 1: t `-> 3: x - `-> 3: x `-> 5: A - `-> 5: A . + s + `-> 1: a x + `-> 3: A . 
`-> 6: B C +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none +./conflicts.at:1651: cat input.output +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none +./conflicts.at:1836: cat input.y >> input-keep.y +stderr: +stdout: +./conflicts.at:1096: $PREPARSER ./input +./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y +stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none +234. conflicts.at:1096: ok stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7255,11 +7320,114 @@ Shift derivation s -> [ y -> [ A . B ] c -> [ C ] ] Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ] input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] - ./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error +259. counterexample.at:43: ok +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none + + +260. 
counterexample.at:83: testing Deep Unifying S/R ... +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y +./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... +./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +256. conflicts.at:1935: ok + +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror stderr: -./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B C + Shift derivation + s + `-> 1: ac + `-> 3: A ac C + `-> 4: b + `-> 5: . B + Reduce derivation + s + `-> 2: a bc + `-> 7: A . `-> 10: B C +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A A . B B C C + Shift derivation + s + `-> 1: ac + `-> 3: A ac C + `-> 3: A ac C + `-> 4: b + `-> 6: . b + `-> 5: B B + Reduce derivation + s + `-> 2: a bc + `-> 8: A a `-> 9: B bc C + `-> 7: A . `-> 10: B C +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] +input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] +./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror +262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... 
+./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B + Shift derivation + s + `-> 2: A xby + `-> 9: . B + Reduce derivation + s + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 4: %empty . `-> 6: %empty +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + First example: A X . B Y $end + Shift derivation + $accept + `-> 0: s $end + `-> 2: A xby + `-> 10: X xby Y + `-> 9: . B + Second example: A X . B y $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 5: X x + `-> 4: %empty . +input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error +./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none +stderr: +2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] +./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error +stderr: +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 +stderr: +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] First example: B . C $end @@ -7278,14 +7446,20 @@ `-> 7: B . 
`-> 8: C D input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A . B C + Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] + Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A A . B B C C + Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] + Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr ./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Werror -259. counterexample.at:43: ok -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none -264. counterexample.at:298: testing S/R after first token ... stderr: input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7299,37 +7473,19 @@ Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ] input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none -stderr: -261. counterexample.at:144: - ok -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A . 
B C - Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] - Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A A . B B C C - Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] - Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr 260. counterexample.at:83: ok +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none +./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +261. counterexample.at:144: ok -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none -265. counterexample.at:363: testing Unifying R/R counterexample ... -./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example A . A B $end - Shift derivation $accept -> [ s -> [ t -> [ y -> [ A . A B ] ] ] $end ] - Second example A . A $end - Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ] -./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... +./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +264. counterexample.at:298: testing S/R after first token ... 
+./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] @@ -7339,21 +7495,31 @@ Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ] input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -251. conflicts.at:1515: ok -stderr: -263. counterexample.at:254: ok -266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none 262. counterexample.at:207: ok +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example: A . A B $end + Shift derivation + $accept + `-> 0: s $end + `-> 1: t + `-> 4: y + `-> 6: A . A B + Second example: A . A $end + Reduce derivation + $accept + `-> 0: s $end + `-> 2: s t + `-> 1: t `-> 3: x + `-> 3: x `-> 5: A + `-> 5: A . +./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 - -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none - -267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... +./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +265. counterexample.at:363: testing Unifying R/R counterexample ... 
+./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none stderr: input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] @@ -7383,26 +7549,18 @@ input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=error -stdout: -./conflicts.at:381: $PREPARSER ./input '0<0' ./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +257. conflicts.at:2299: ok +254. conflicts.at:1592: ok + stderr: -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:381: $PREPARSER ./input '0<0<0' -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none -stderr: -268. counterexample.at:488: testing Cex Search Prepend ... -./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -syntax error, unexpected '<', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -269. counterexample.at:550: testing R/R cex with prec ... -270. counterexample.at:610: testing Null nonterminals ... -./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example A . A B $end + Shift derivation $accept -> [ s -> [ t -> [ y -> [ A . A B ] ] ] $end ] + Second example A . A $end + Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ] +./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr stderr: input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] @@ -7415,14 +7573,45 @@ `-> 1: A b `-> 3: b . 
input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:381: $PREPARSER ./input '0>0' ./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none + +263. counterexample.at:254: ok +./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y +266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... +./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + Example b . A X X Y + Shift derivation a -> [ s -> [ b . xx -> [ A X X ] y -> [ Y ] ] ] + Reduce derivation a -> [ r -> [ b . ] t -> [ A x -> [ X ] xy -> [ X Y ] ] ] +input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] + First example A X . X + Shift derivation a -> [ t -> [ A xx -> [ X . X ] ] ] + Second example X . X xy + Reduce derivation a -> [ x -> [ X . ] t -> [ X xy ] ] +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] + +./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... +./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +264. counterexample.at:298: ok +268. counterexample.at:488: testing Cex Search Prepend ... +./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + stderr: -./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stdout: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example A b . + First reduce derivation a -> [ A b . ] + Second reduce derivation a -> [ A b -> [ b . ] ] +input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +265. counterexample.at:363: ok +269. counterexample.at:550: testing R/R cex with prec ... 
+./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] @@ -7439,26 +7628,12 @@ `-> 4: B b A `-> 6: D . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:381: $PREPARSER ./input '0>0>0' ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -syntax error -stderr: -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1651: cat input.output -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror ./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:1836: cat input.y >> input-keep.y -./conflicts.at:381: $PREPARSER ./input '0<0>0' -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -233. conflicts.at:1096: ok -./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y + +270. counterexample.at:610: testing Null nonterminals ... +./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7489,38 +7664,21 @@ `-> 7: A . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - Example b . A X X Y - Shift derivation a -> [ s -> [ b . xx -> [ A X X ] y -> [ Y ] ] ] - Reduce derivation a -> [ r -> [ b . ] t -> [ A x -> [ X ] xy -> [ X Y ] ] ] -input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] - First example A X . X - Shift derivation a -> [ t -> [ A xx -> [ X . X ] ] ] - Second example X . X xy - Reduce derivation a -> [ x -> [ X . 
] t -> [ X xy ] ] -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] - -./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr ./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Wnone,none -Werror --trace=none -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example A b . - First reduce derivation a -> [ A b . ] - Second reduce derivation a -> [ A b -> [ b . ] ] -input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -264. counterexample.at:298: ok -./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] stderr: -271. counterexample.at:797: testing Non-unifying Prefix Share ... -265. counterexample.at:363: input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] + First example D . A $end + First reduce derivation $accept -> [ s -> [ a -> [ D . ] A ] $end ] + Second example B D . A $end + Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 +stderr: +input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] Example: B . b c First reduce derivation @@ -7549,34 +7707,14 @@ `-> 4: C `-> 6: %empty `-> 7: A c A `-> 5: %empty . `-> 7: %empty +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error +266. counterexample.at:399: ./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ok -./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -256. 
conflicts.at:1935: ok ./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror -stderr: -input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] - First example D . A $end - First reduce derivation $accept -> [ s -> [ a -> [ D . ] A ] $end ] - Second example B D . A $end - Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr - -266. counterexample.at:399: ok -./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror -272. counterexample.at:842: testing Deep Null Unifying ... -./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror - -273. counterexample.at:884: testing Deep Null Non-unifying ... -./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: +271. counterexample.at:797: testing Non-unifying Prefix Share ... +./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example N A . B C @@ -7588,11 +7726,23 @@ Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -274. synclines.at:194: testing Prologue syncline ... -./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=none -Werror --trace=none 268. counterexample.at:488: ok +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none + +272. 
counterexample.at:842: testing Deep Null Unifying ... +./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example B . b c + First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] + Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . ] b A -> [ ] ] ] c A -> [ ] ] ] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example C . c b + First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] + Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] +./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] @@ -7607,39 +7757,18 @@ `-> 3: H i J J `-> 5: i J . input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] -275. synclines.at:214: testing %union syncline ... ./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 stderr: +stdout: +269. counterexample.at:550: ok +./conflicts.at:754: $PREPARSER ./input stderr: -input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example B . b c - First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] - Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . ] b A -> [ ] ] ] c A -> [ ] ] ] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example C . c b - First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] - Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . 
] c A -> [ ] ] ] b A -> [ ] ] ] -input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] -input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +syntax error, unexpected 'a', expecting 'b' or 'c' +./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +228. conflicts.at:676: ok -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error stderr: -269. counterexample.at:550: ./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] Example: A a . D @@ -7653,14 +7782,46 @@ `-> 3: b `-> 6: D `-> 4: c `-> 5: %empty . - ok ./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr + +./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +273. counterexample.at:884: testing Deep Null Non-unifying ... +./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +274. synclines.at:194: testing Prologue syncline ... +./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] + Example H i J . J J + Shift derivation s -> [ a -> [ H i J . J ] J ] + Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] +input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +271. 
counterexample.at:797: ok +stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stdout: +./conflicts.at:1096: $PREPARSER ./input +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + Example A a . D + Shift derivation s -> [ A a d -> [ . D ] ] + Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] +./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr + +stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +272. counterexample.at:842: ok +235. conflicts.at:1096: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +275. synclines.at:214: testing %union syncline ... + +./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] First example: A a . D $end @@ -7677,15 +7838,13 @@ `-> 3: b `-> 6: D `-> 4: c `-> 5: %empty . +./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c + ./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error -276. synclines.at:237: testing %union name syncline ... 
-./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" @@ -7717,27 +7876,23 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c +276. synclines.at:237: testing %union name syncline ... +./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: -./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +277. synclines.at:264: testing Postprologue syncline ... +./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y syncline.c:4: #error "4" ./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example H i J . J J - Shift derivation s -> [ a -> [ H i J . J ] J ] - Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] -input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none -stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +input.y:2:2: error: #error "2" + 2 | #error "2" | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7764,17 +7919,16 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -277. synclines.at:264: testing Postprologue syncline ... 
-./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -syncline.c:4: #error "4" -stderr: -./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stderr: -input.y:2:2: error: #error "2" - 2 | #error "2" +input.y:2: #error "2" +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" | ^~~~~ -./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:194: cat stdout +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7801,26 +7955,13 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c -stdout: -input.y:2: #error "2" -./synclines.at:194: cat stdout -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - Example A a . D - Shift derivation s -> [ A a d -> [ . D ] ] - Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -271. counterexample.at:797: ok -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c -stdout: 274. synclines.at:194: ok -./output.at:836: $PREPARSER ./parser -272. counterexample.at:842: ok -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none +stdout: +syncline.c:4: #error "4" +./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] First example A a . D $end @@ -7828,18 +7969,17 @@ Second example A a . D E $end Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] ./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file - -stderr: +273. 
counterexample.at:884: ok stderr: -input.y:2:2: error: #error "2" - 2 | #error "2" - | ^~~~~ syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7867,6 +8007,11 @@ EOF stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ ./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7894,35 +8039,19 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none -./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -input.y:2: #error "2" -./synclines.at:214: cat stdout -273. counterexample.at:884: stdout: - ok +syncline.c:4: #error "4" +./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +stdout: syncline.c:4: #error "4" ./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -275. synclines.at:214: ok -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c -279. synclines.at:310: testing Epilogue syncline ... -278. synclines.at:291: testing Action syncline ... -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c - -./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none -./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c -280. synclines.at:327: testing %code top syncline ... -254. conflicts.at:1592: ok -./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +input.y:2:2: error: #error "2" + 2 | #error "2" | ^~~~~ -./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -7949,15 +8078,21 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c stdout: -syncline.c:4: #error "4" -./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -281. synclines.at:346: testing %destructor syncline ... -./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input.y:2: #error "2" +278. synclines.at:291: testing Action syncline ... +./synclines.at:214: cat stdout +./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +279. synclines.at:310: testing Epilogue syncline ... +./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +275. synclines.at:214: ok stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +input.y:13:2: error: #error "13" + 13 | #error "13" | ^~~~~ ./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. @@ -7986,12 +8121,11 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -syncline.c:4: #error "4" -./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c stderr: +input.y:13: #error "13" +./synclines.at:264: cat stdout input.y:1:7: error: expected '{' before 'break' 1 | %union break | ^~~~~ @@ -8160,8 +8294,10 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +277. synclines.at:264: ok stdout: -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c input.y:1: #error expected '{' before 'break' %union break ^~~~~ @@ -8301,16 +8437,24 @@ yydestruct ("Error: discarding", ^~~~~~~~~~ ./synclines.at:255: grep '^input.y:1' stdout -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c stdout: input.y:1: #error expected '{' before 'break' input.y:1: #error expected '{' before 'break' -stderr: +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file 276. 
synclines.at:237: ok -input.y:13:2: error: #error "13" - 13 | #error "13" + +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c +280. synclines.at:327: testing %code top syncline ... +./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c + +stderr: +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" | ^~~~~ -./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +281. synclines.at:346: testing %destructor syncline ... +./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8337,28 +8481,13 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror -stdout: -input.y:13: #error "13" -./synclines.at:264: cat stdout -144. output.at:744: ok - -stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error -282. synclines.at:370: testing %printer syncline ... -./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c - +./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -283. synclines.at:440: testing syncline escapes: yacc.c ... -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8385,13 +8514,31 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +stdout: +syncline.c:4: #error "4" +./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +stdout: +syncline.c:4: #error "4" +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +282. synclines.at:370: testing %printer syncline ... 
+./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: $PREPARSER ./input +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c +stderr: +input.y: In function 'yyparse': +input.y:8:2: error: #error "8" + 8 | #error "8" | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +236. conflicts.at:1096: ok +./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8418,10 +8565,6 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stdout: -syncline.c:4: #error "4" -./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c stderr: input.y:8:2: error: #error "8" 8 | #error "8" @@ -8455,33 +8598,18 @@ stdout: input.y:8: #error "8" -./synclines.at:310: cat stdout -279. synclines.at:310: ok - -284. synclines.at:440: testing syncline escapes: glr.c ... -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -stdout: -syncline.c:4: #error "4" -./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c -stderr: stdout: -./conflicts.at:1096: $PREPARSER ./input +input.y:8: #error "8" +./synclines.at:310: cat stdout +./synclines.at:291: cat stdout stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none -234. conflicts.at:1096: ok - ./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c -stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8508,42 +8636,21 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -286. synclines.at:440: testing syncline escapes: glr.cc ... -stderr: -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -285. synclines.at:440: testing syncline escapes: lalr1.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -257. conflicts.at:2299: ok - -277. synclines.at:264: ok - - -287. synclines.at:440: testing syncline escapes: glr2.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -288. synclines.at:497: testing %no-lines: yacc.c ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -289. synclines.at:497: testing %no-lines: glr.c ... 
-./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -stderr: -stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -stderr: -stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +278. synclines.at:291: ok +279. synclines.at:310: ok stdout: syncline.c:4: #error "4" -./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:497: mv input.c without.c -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:497: mv input.h without.h -./synclines.at:497: grep '#line' *.c *.h -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c + stderr: -input.y: In function 'yydestruct': -input.y:2:2: error: #error "2" - 2 | #error "2" +283. synclines.at:440: testing syncline escapes: yacc.c ... +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" | ^~~~~ +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 ./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8571,32 +8678,17 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stdout: -input.y:2: #error "2" -./synclines.at:346: cat stdout -281. synclines.at:346: ok -./synclines.at:497: mv input.c with.c -./synclines.at:497: mv input.h with.h -./synclines.at:497: grep -v '#line' with.c >expout -./synclines.at:497: cat without.c -./synclines.at:497: grep -v '#line' with.h >expout -./synclines.at:497: cat without.h -290. synclines.at:497: testing %no-lines: lalr1.cc ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -288. synclines.at:497: ok -stderr: stdout: -./conflicts.at:743: $PREPARSER ./input -stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -input.y:2:2: error: #error "2" - 2 | #error "2" +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" | ^~~~~ -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +syncline.c:4: #error "4" +./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -8623,69 +8715,12 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stdout: -input.y:2: #error "2" -./synclines.at:327: cat stdout -280. synclines.at:327: ok - -291. synclines.at:497: testing %no-lines: glr.cc ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -stderr: -stdout: -283. synclines.at:440: ok - -292. synclines.at:497: testing %no-lines: glr2.cc ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -stderr: -stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -stderr: -stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./synclines.at:497: mv input.cc with.cc -./synclines.at:497: mv input.hh with.hh -./synclines.at:497: grep -v '#line' with.cc >expout -./synclines.at:497: cat without.cc -./synclines.at:497: grep -v '#line' with.hh >expout -./synclines.at:497: mv input.c without.c -./synclines.at:497: cat without.hh -./synclines.at:497: mv input.h without.h -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -./synclines.at:497: grep '#line' *.c *.h -291. synclines.at:497: ok -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -./synclines.at:497: mv input.cc with.cc - -./synclines.at:497: mv input.hh with.hh -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -./synclines.at:497: grep -v '#line' with.cc >expout -./synclines.at:497: cat without.cc -./synclines.at:497: grep -v '#line' with.hh >expout -stderr: -./synclines.at:497: cat without.hh -stdout: -./conflicts.at:558: $PREPARSER ./input -292. synclines.at:497: stderr: - ok -293. synclines.at:507: testing Output columns ... -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS stderr: -input.y: In function 'yyparse': -input.y:8:2: error: #error "8" - 8 | #error "8" +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c +input.y:2:2: error: #error "2" + 2 | #error "2" | ^~~~~ -./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -8712,15 +8747,27 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:497: mv input.c with.c -./synclines.at:497: mv input.h with.h - +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +284. synclines.at:440: testing syncline escapes: glr.c ... stdout: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +syncline.c:4: #error "4" +stdout: +./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +input.y:2: #error "2" +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:327: cat stdout +285. synclines.at:440: testing syncline escapes: lalr1.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +280. synclines.at:327: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +input.y: In function 'yydestruct': +input.y:2:2: error: #error "2" + 2 | #error "2" | ^~~~~ -./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8747,35 +8794,14 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -input.y:8: #error "8" -219. conflicts.at:558: ok -./synclines.at:291: cat stdout -./synclines.at:497: grep -v '#line' with.c >expout stdout: -./synclines.at:497: cat without.c -syncline.c:4: #error "4" -./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -278. synclines.at:291: ok -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:497: grep -v '#line' with.h >expout - -./synclines.at:497: cat without.h -294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... -289. synclines.at:497: - ok - -./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +input.y:2: #error "2" +./synclines.at:346: cat stdout -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none -295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... -297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... -296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... -stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +281. 
synclines.at:346: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: -./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ - -e '/input.c/s/ [0-9]* / LINE /;' \ - -e 'p;}' \ - input.c input.y: In function 'yy_symbol_value_print': input.y:2:2: error: #error "2" 2 | #error "2" @@ -8807,300 +8833,406 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +stderr: + +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y -./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y stdout: input.y:2: #error "2" +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +286. synclines.at:440: testing syncline escapes: glr.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 ./synclines.at:370: cat stdout -293. synclines.at:507: ok -298. headers.at:67: testing export YYLTYPE ... -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +282. synclines.at:370: ok +287. synclines.at:440: testing syncline escapes: glr2.cc ... +stdout: +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +288. synclines.at:497: testing %no-lines: yacc.c ... 
+./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +stdout: +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +stdout: +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./synclines.at:497: mv input.c without.c +./synclines.at:497: mv input.h without.h +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +./synclines.at:497: grep '#line' *.c *.h +stdout: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: mv input.c with.c +./synclines.at:497: mv input.h with.h +stderr: +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +stdout: +./synclines.at:497: grep -v '#line' with.c >expout +283. synclines.at:440: ok +./synclines.at:497: cat without.c +./synclines.at:497: grep -v '#line' with.h >expout +./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./synclines.at:497: cat without.h + +288. synclines.at:497: ok + +289. synclines.at:497: testing %no-lines: glr.c ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +290. synclines.at:497: testing %no-lines: lalr1.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./synclines.at:497: mv input.c without.c +./synclines.at:497: mv input.h without.h +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: grep '#line' *.c *.h +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y ./synclines.at:497: mv input.cc without.cc -./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c -282. synclines.at:370: ./synclines.at:497: mv input.hh without.hh - ok +./synclines.at:497: mv input.hh without.hh ./synclines.at:497: grep '#line' *.cc *.hh - ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: mv input.c with.c +./synclines.at:497: mv input.h with.h +./synclines.at:497: grep -v '#line' with.c >expout +./synclines.at:497: cat without.c +./synclines.at:497: mv input.cc with.cc +./synclines.at:497: mv input.hh with.hh +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: grep -v '#line' with.h >expout +./synclines.at:497: cat without.h +./synclines.at:497: cat without.cc +289. synclines.at:497: ok +./synclines.at:497: grep -v '#line' with.hh >expout +./synclines.at:497: cat without.hh +290. synclines.at:497: ok -./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. 
-c input/input.c -./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -299. headers.at:177: testing Sane headers: ... -./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +291. synclines.at:497: testing %no-lines: glr.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +292. synclines.at:497: testing %no-lines: glr2.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: grep '#line' *.cc *.hh +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: grep '#line' *.cc *.hh +./synclines.at:497: mv input.cc with.cc stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 9.000000 + First example: H i . J K $end + Shift derivation + $accept + `-> 0: a $end + `-> 2: H i + `-> 4: i . J K + Second example: H i . J $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: a J + `-> 2: H i . 
+input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -stdout: -./conflicts.at:518: $PREPARSER ./input -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary 
--show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: mv input.hh with.hh +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: cat without.cc +./synclines.at:497: grep -v '#line' with.hh >expout +./synclines.at:497: cat without.hh +291. synclines.at:497: ok +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file + +293. synclines.at:507: testing Output columns ... +./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y stderr: -syntax error, unexpected end of file -./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +284. synclines.at:440: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ + -e '/input.c/s/ [0-9]* / LINE /;' \ + -e 'p;}' \ + input.c +294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./synclines.at:497: mv input.cc with.cc -300. headers.at:178: testing Sane headers: %locations %debug ... -./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +293. 
synclines.at:507: ok +./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./synclines.at:497: mv input.hh with.hh -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror ./synclines.at:497: grep -v '#line' with.cc >expout -212. conflicts.at:518: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + ./synclines.at:497: cat without.cc ./synclines.at:497: grep -v '#line' with.hh >expout +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./synclines.at:497: cat without.hh - -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stderr: -290. synclines.at:497: ok -input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... +./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:85: sed 's,.*/$,,' stderr 1>&2 +292. synclines.at:497: ok +./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -301. headers.at:180: testing Sane headers: %glr-parser ... 
-./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error -stderr: ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... +./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c +./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -294. headers.at:56: ok stderr: stdout: -302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... 
-./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:388: $PREPARSER ./input '0<0' -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +295. headers.at:57: ok stderr: -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:388: $PREPARSER ./input '0<0<0' +stdout: +294. headers.at:56: ok + stderr: -syntax error, unexpected '<', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 6.000000 + First example H i . J K $end + Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] + Second example H i . J $end + Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... +./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c +267. counterexample.at:441: ok + +298. headers.at:67: testing export YYLTYPE ... +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror +285. synclines.at:440: ok + +299. headers.at:177: testing Sane headers: ... +./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +stderr: +input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:85: sed 's,.*/$,,' stderr 1>&2 +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error + ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error -./conflicts.at:388: $PREPARSER ./input '0>0' +258. conflicts.at:2331: ok +300. headers.at:178: testing Sane headers: %locations %debug ... +./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none +301. headers.at:180: testing Sane headers: %glr-parser ... +./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: +./conflicts.at:1096: $PREPARSER ./input stderr: -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -295. headers.at:57: ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - ok +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +237. conflicts.at:1096: ok ./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:388: $PREPARSER ./input '0>0>0' +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c + stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stdout: +299. headers.at:177: ok +./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c -303. 
headers.at:183: testing Sane headers: api.pure ... -./conflicts.at:388: $PREPARSER ./input '0<0>0' +302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... +./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stderr: -syntax error, unexpected '>', expecting end of file +stdout: +303. headers.at:183: testing Sane headers: api.pure ... ./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +297. headers.at:59: ok +stderr: +stdout: + ./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -209. conflicts.at:301: ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file - ok -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c 304. headers.at:184: testing Sane headers: api.push-pull=both ... ./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stdout: -305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... 
-./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:564: $PREPARSER ./input -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS stderr: -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none -220. conflicts.at:564: ok +stdout: +./headers.at:105: $PREPARSER ./caller +stderr: +./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +298. headers.at:67: ok -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -306. headers.at:187: testing Sane headers: c++ ... -./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y stderr: stdout: -./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... 
+./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stderr: stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: stdout: -./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS -stdout: -299. headers.at:177: ok - stderr: stdout: -./headers.at:105: $PREPARSER ./caller +303. headers.at:183: ok stderr: -./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -298. headers.at:67: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +300. headers.at:178: ok +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stdout: +296. headers.at:58: ok + + -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... -./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +287. synclines.at:440: ok +306. headers.at:187: testing Sane headers: c++ ... 307. headers.at:188: testing Sane headers: %locations %debug c++ ... 
./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y + +308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... +./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y stderr: stdout: -305. headers.at:185: ok - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc 309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ... ./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y stderr: stdout: -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +304. headers.at:184: ok +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc stderr: stdout: -304. headers.at:184: ok +286. synclines.at:440: ok + +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +311. actions.at:24: testing Midrule actions ... ./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -stderr: -stdout: -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y 310. headers.at:199: testing Several parsers ... ./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y +./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: stdout: -303. headers.at:183: ok +305. headers.at:185: ok + +312. actions.at:72: testing Typed midrule actions ... +./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stdout: -./conflicts.at:1096: $PREPARSER ./input - +./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -235. conflicts.at:1096: ok +stdout: stderr: stdout: -300. headers.at:178: ok - -311. actions.at:24: testing Midrule actions ... -./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c +301. headers.at:180: ok 313. 
actions.at:122: testing Implicitly empty rule ... ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror -312. actions.at:72: testing Typed midrule actions ... -./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +stderr: +1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] + 11 | a: /* empty. */ {}; + | ^~ + | %empty +1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./actions.at:133: sed 's,.*/$,,' stderr 1>&2 stderr: stdout: -296. headers.at:58: ok +302. headers.at:181: ok 314. actions.at:172: testing Invalid uses of %empty ... ./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret one.y ./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -u one.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -stderr: 
-stdout: -284. synclines.at:440: ok - -stderr: -stdout: -297. headers.at:59: ok -315. actions.at:240: testing Valid uses of %empty ... -./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:202: sed -e '1,8d' one.y +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error ./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none 314. actions.at:172: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] - 11 | a: /* empty. */ {}; - | ^~ - | %empty -1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./actions.at:133: sed 's,.*/$,,' stderr 1>&2 -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none +315. actions.at:240: testing Valid uses of %empty ... +./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: H i . J K $end - Shift derivation - $accept - `-> 0: a $end - `-> 2: H i - `-> 4: i . J K - Second example: H i . J $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: a J - `-> 2: H i . 
-input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y -./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror - -317. actions.at:365: testing Initial location: yacc.c ... -./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: 2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] 11 | a: /* empty. */ {}; @@ -9112,28 +9244,19 @@ | %empty 2.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] ./actions.at:149: sed 's,.*/$,,' stderr 1>&2 -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error -./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y -313. actions.at:122: ok - -318. actions.at:366: testing Initial location: yacc.c api.pure=full ... 
-./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./conflicts.at:754: $PREPARSER ./input -./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:61: $PREPARSER ./input stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -228. conflicts.at:676: ok +./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./actions.at:260: $PREPARSER ./input +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error +stderr: +./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +311. actions.at:24: ok + 316. actions.at:270: testing Add missing %empty ... ./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update -Wall input.y stderr: @@ -9145,264 +9268,372 @@ bison: file 'input.y' was updated (backup: 'input.y~') ./actions.at:286: cat input.y ./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +315. actions.at:240: ok + +317. actions.at:365: testing Initial location: yacc.c ... +./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none +316. actions.at:270: ok +./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +318. actions.at:366: testing Initial location: yacc.c api.pure=full ... +./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: . c A A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 4: %empty . `-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b . 
c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . c A A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: b c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: b d . + First reduce derivation + a + `-> 1: b d . + Second reduce derivation + a + `-> 1: b d + `-> 7: d . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: c d . + First reduce derivation + a + `-> 2: c d . + Second reduce derivation + a + `-> 2: c d + `-> 7: d . +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c +stderr: +stdout: +./actions.at:111: $PREPARSER ./input +stderr: +./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +312. actions.at:72: ok 319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... 
./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -316. actions.at:270: ok ./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +stdout: ./headers.at:320: echo "x1" >>expout +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc ./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export 
VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file - ./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c +stderr: +stdout: +./actions.at:367: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +319. actions.at:367: ok + 320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... ./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc ./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -236. conflicts.at:1096: ok - -321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... -./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./actions.at:365: $PREPARSER ./input +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -1.1 -1.1: syntax error -./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -317. actions.at:365: ok +stdout: +./headers.at:321: echo "x2" >>expout stderr: stdout: ./actions.at:366: $PREPARSER ./input +./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y stderr: 1.1 1.1: syntax error ./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +306. headers.at:187: ok 318. actions.at:366: ok + + +321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... +./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +322. actions.at:370: testing Initial location: glr.c ... +./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c +./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:61: $PREPARSER ./input +./actions.at:368: $PREPARSER ./input stderr: -./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -311. actions.at:24: ok +1.1 +1.1: syntax error +./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +320. actions.at:368: ok + stderr: stdout: -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stdout: -302. headers.at:181: ok +323. actions.at:371: testing Initial location: glr.c api.pure ... 
+./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +313. actions.at:122: ok + stderr: stdout: -./actions.at:111: $PREPARSER ./input -stderr: -./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -312. actions.at:72: ok -./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - - - - - +./actions.at:369: $PREPARSER ./input 324. actions.at:372: testing Initial location: lalr1.cc ... ./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -322. actions.at:370: testing Initial location: glr.c ... -./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -326. actions.at:374: testing Initial location: glr2.cc ... -./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +1.1 +1.1: syntax error +./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +321. actions.at:369: ok + 325. actions.at:373: testing Initial location: glr.cc ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -323. actions.at:371: testing Initial location: glr.c api.pure ... -./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./actions.at:260: $PREPARSER ./input +./actions.at:365: $PREPARSER ./input stderr: -./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -315. actions.at:240: ok +1.1 +1.1: syntax error +./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +317. actions.at:365: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +326. actions.at:374: testing Initial location: glr2.cc ... 
+./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./headers.at:322: echo "x3" >>expout +./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y stderr: stdout: -301. headers.at:180: ok - -328. actions.at:394: testing Initial location: yacc.c api.pure=full ... -./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: stdout: -./headers.at:321: echo "x2" >>expout -./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: stdout: -./actions.at:367: $PREPARSER ./input +./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c +./actions.at:370: $PREPARSER ./input stderr: -./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 1.1 1.1: syntax error -./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -319. actions.at:367: ok +./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +322. actions.at:370: ok +stderr: +stdout: +./actions.at:371: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +323. actions.at:371: ok -./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -329. actions.at:478: testing Location print: yacc.c ... -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 327. actions.at:383: testing Initial location: yacc.c api.pure=full ... 
./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -286. synclines.at:440: ok +./actions.at:383: $PREPARSER ./input +stderr: -./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -330. actions.at:478: testing Location print: glr.c ... +: syntax error +./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +327. actions.at:383: ok + + +329. actions.at:478: testing Location print: yacc.c ... +328. actions.at:394: testing Initial location: yacc.c api.pure=full ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -285. synclines.at:440: ok - -331. actions.at:478: testing Location print: lalr1.cc ... 
-./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -stdout: ./actions.at:394: $PREPARSER ./input stderr: 0 0: syntax error ./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -328. actions.at:394: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -332. actions.at:478: testing Location print: glr.cc ... 
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -./actions.at:369: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error +308. headers.at:189: ok + stderr: -./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./actions.at:478: $PREPARSER ./input -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -321. actions.at:369: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 329. actions.at:478: ok +331. actions.at:478: testing Location print: lalr1.cc ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +328. actions.at:394: ok +332. actions.at:478: testing Location print: glr.cc ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./actions.at:368: $PREPARSER ./input +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +./actions.at:372: $PREPARSER ./input stderr: 1.1 1.1: syntax error -./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +324. actions.at:372: ok + 333. actions.at:478: testing Location print: glr2.cc ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -320. actions.at:368: ok - -258. conflicts.at:2331: ok -335. actions.at:1047: testing Printers and Destructors ... 
-./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - stderr: stdout: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +330. actions.at:478: testing Location print: glr.c ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:383: $PREPARSER ./input +309. headers.at:191: ok + 334. actions.at:488: testing Exotic Dollars ... ./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y stderr: - -: syntax error -./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -327. actions.at:383: ok -336. actions.at:1048: testing Printers and Destructors with union ... -./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - +stdout: +./headers.at:323: echo "x4" >>expout +./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y ./actions.at:533: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -337. actions.at:1050: testing Printers and Destructors: %glr-parser ... -./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example H i . J K $end - Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] - Second example H i . J $end - Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -267. counterexample.at:441: ok - -338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... -./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: +307. headers.at:188: ok + +335. actions.at:1047: testing Printers and Destructors ... 
+./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: ./actions.at:534: $PREPARSER ./input stderr: ./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -9410,44 +9641,18 @@ ./actions.at:562: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./headers.at:322: echo "x3" >>expout -./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y -stderr: -stdout: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: -./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c -stdout: -./actions.at:563: $PREPARSER ./input -stderr: -./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -334. actions.at:488: ok - -339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... -./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: ./actions.at:478: $PREPARSER ./input stderr: ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -330. actions.at:478: ok +331. actions.at:478: ok +336. actions.at:1048: testing Printers and Destructors with union ... +./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:371: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... -./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -323. actions.at:371: stdout: - ok ./actions.at:1048: $PREPARSER ./input '(x)' stderr: -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9460,11 +9665,12 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: +./actions.at:563: $PREPARSER ./input ./actions.at:1048: $PREPARSER ./input '!' stderr: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc - +stderr: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -9472,7 +9678,12 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. +./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +334. actions.at:488: ok + +337. actions.at:1050: testing Printers and Destructors: %glr-parser ... +./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1048: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) @@ -9485,10 +9696,10 @@ Freeing nterm input (5@0-29) Successful parse. 
./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1048: $PREPARSER ./input '(y)' stderr: stdout: -./actions.at:1048: $PREPARSER ./input '(y)' -./actions.at:1047: $PREPARSER ./input '(x)' +./actions.at:478: $PREPARSER ./input stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -9503,28 +9714,15 @@ Freeing nterm input (2@0-29) Successful parse. stderr: +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -stdout: -stdout: -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:372: $PREPARSER ./input +332. actions.at:478: ok + +338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... +./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./actions.at:1047: $PREPARSER ./input '!' -341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9561,23 +9759,7 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -306. headers.at:187: ok -./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: $PREPARSER ./input '(x)(x)x' -./actions.at:1047: $PREPARSER ./input '!!!' -stderr: -1.1 -1.1: syntax error -stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9598,21 +9780,8 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. - -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' -324. actions.at:372: ok stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9658,9 +9827,78 @@ Freeing nterm line (0@0-29) Parsing FAILED (status 2). ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +336. actions.at:1048: ok + +339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... 
+./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./actions.at:374: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./actions.at:478: $PREPARSER ./input +326. actions.at:374: ok + +stderr: +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... +./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +330. actions.at:478: ok + +341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... +./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./headers.at:324: echo "x5" >>expout +./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c +stderr: +stdout: +./actions.at:1047: $PREPARSER ./input '(x)' +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1047: $PREPARSER ./input '!' +stderr: +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1047: $PREPARSER ./input '!!!' +stderr: +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1047: $PREPARSER ./input '(y)' -336. actions.at:1048: ok stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -9675,19 +9913,8 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... 
-./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: - -stdout: -./actions.at:370: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9724,14 +9951,8 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -322. actions.at:370: ok -stdout: ./actions.at:1047: $PREPARSER ./input '(x)(x)x' stderr: -343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9754,8 +9975,6 @@ ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' stderr: -344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... -./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9800,108 +10019,27 @@ Freeing nterm line (0@0-29) Parsing FAILED (status 2). ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - 335. actions.at:1047: ok -345. actions.at:1071: testing Default tagless %printer and %destructor ... -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... 
+./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:325: echo "x6" >>expout +./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y ./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stderr: -stderr: -input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -stdout: -./conflicts.at:1096: $PREPARSER ./input -stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -stderr: -stdout: -stderr: -input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:22.3-4: error: useless %printer for type <> [-Werror=other] -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -237. conflicts.at:1096: ok -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none - -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -stderr: -stdout: -347. 
actions.at:1307: testing Default %printer and %destructor for user-defined end token ... -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y -287. synclines.at:440: ok -./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror -348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stderr: -input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -stderr: -./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 -stdout: -stderr: -stderr: +./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c stderr: stdout: -input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] -input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] -stdout: -./actions.at:478: $PREPARSER ./input -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error ./actions.at:373: $PREPARSER ./input -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 1.1 1.1: syntax error ./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -331. actions.at:478: ok -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error 325. actions.at:373: ok -stderr: -stdout: - -308. headers.at:189: ok -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none -349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... - -stderr: -stdout: -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -350. actions.at:1596: testing Default %printer and %destructor for midrule values ... 
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -351. actions.at:1743: testing @$ in %initial-action implies %locations ... -./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... +./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./actions.at:1051: $PREPARSER ./input '(x)' @@ -9928,11 +10066,7 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:24.3-4: error: useless %printer for type <> [-Werror=other] ./actions.at:1051: $PREPARSER ./input '!!!' -./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -9959,16 +10093,8 @@ Freeing nterm input (2@0-29) Successful parse. 
./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 -stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] -input.y:33.3-23: error: unset value: $$ [-Werror=other] -input.y:32.3-23: error: unused value: $3 [-Werror=other] ./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10005,10 +10131,8 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 ./actions.at:1051: $PREPARSER ./input '(x)(x)x' stderr: -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10029,24 +10153,25 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc 338. actions.at:1051: ok +./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... +./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./headers.at:323: echo "x4" >>expout - -./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./headers.at:326: echo "x7" >>expout +./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y +./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c stderr: -352. actions.at:1744: testing @$ in %destructor implies %locations ... 
-./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./headers.at:327: echo "x8" >>expout +./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y +./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc stderr: stdout: ./actions.at:1050: $PREPARSER ./input '(x)' -stdout: -./actions.at:478: $PREPARSER ./input stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10060,14 +10185,8 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./actions.at:1050: $PREPARSER ./input '!' -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -332. actions.at:478: ok stderr: -stdout: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10076,66 +10195,8 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1238: $PREPARSER ./input --debug -stderr: -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token 'a' (<*>//e printer) -Shifting token 'a' (<*>//e printer) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'b' ( printer) -Shifting token 'b' ( printer) -Entering state 3 -Stack now 0 1 3 -Reading a token -Next token is token 'c' ('c' printer) -Shifting token 'c' ('c' printer) -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token 'd' ('d' printer) -Shifting token 'd' ('d' printer) -Entering state 6 -Stack now 0 1 3 5 6 -Reading a token -Next token is token 'e' (<*>//e printer) -Shifting token 'e' (<*>//e printer) -Entering state 7 -Stack now 0 1 3 5 6 7 -Reading a token -Next token is token 'f' (<*>//e printer) -Shifting token 'f' (<*>//e printer) -Entering state 8 -Stack now 0 1 3 5 6 7 8 -Reading a token -Now at end of input. -syntax error, unexpected end of file, expecting 'g' -Error: popping token 'f' (<*>//e printer) -Stack now 0 1 3 5 6 7 -Error: popping token 'e' (<*>//e printer) -Stack now 0 1 3 5 6 -Error: popping token 'd' ('d' printer) -Stack now 0 1 3 5 -Error: popping token 'c' ('c' printer) -Stack now 0 1 3 -Error: popping token 'b' ( printer) -Stack now 0 1 -Error: popping token 'a' (<*>//e printer) -Stack now 0 -Cleanup: discarding lookahead token "end of file" () -Stack now 0 -./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc -./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1050: $PREPARSER ./input '!!!' stderr: -346. actions.at:1174: ok sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10146,12 +10207,8 @@ Freeing nterm input (5@0-29) Successful parse. 
./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -353. actions.at:1745: testing @$ in %printer implies %locations ... -./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1050: $PREPARSER ./input '(y)' - stderr: -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -10165,9 +10222,37 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./actions.at:1054: $PREPARSER ./input '(x)' +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: $PREPARSER ./input '!' +stderr: +stdout: +./actions.at:1053: $PREPARSER ./input '(x)' +stderr: ./actions.at:1050: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: stderr: +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10203,53 +10288,24 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./actions.at:1121: $PREPARSER ./input --debug +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1050: $PREPARSER ./input '(x)(x)x' +./actions.at:1054: $PREPARSER ./input '!!!' stderr: -./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... 
-./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token 'a' (1.1: <> printer for 'a' @ 1) -Shifting token 'a' (1.1: <> printer for 'a' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Entering state 3 -Stack now 0 1 3 -Reading a token -Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token 'd' (1.4: <> printer for 'd' @ 4) -Shifting token 'd' (1.4: <> printer for 'd' @ 4) -Entering state 6 -Stack now 0 1 3 5 6 -Reading a token -Now at end of input. -1.5: syntax error, unexpected end of file, expecting 'e' -Error: popping token 'd' (1.4: <> printer for 'd' @ 4) -Stack now 0 1 3 5 -Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Stack now 0 1 3 -Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Stack now 0 1 -Error: popping token 'a' (1.1: <> printer for 'a' @ 1) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (1.5: ) -Stack now 0 stderr: -./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10270,187 +10326,16 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -337. actions.at:1050: ok -345. actions.at:1071: ok -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y - -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -351. actions.at:1743: ok - -355. actions.at:1856: testing Qualified $$ in actions: glr.c ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -stderr: -stdout: -./actions.at:1416: $PREPARSER ./input0 --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. -Shifting token END (1.1: <> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <> for 'S' @ 1) -./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... 
-./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -352. actions.at:1744: ok - -stderr: -stdout: -./actions.at:1479: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token 'a' ('a') -Shifting token 'a' ('a') -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'b' ('b') -syntax error -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token 'b' ('b') -Shifting token 'b' ('b') -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token "invalid token" () -Error: popping token 'b' ('b') -DESTROY 'b' -Stack now 0 1 3 -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token "invalid token" () -Error: discarding token "invalid token" () -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Reading a token -Now at end of input. -Cleanup: discarding lookahead token "end of file" () -Stack now 0 1 3 -Cleanup: popping token error () -Cleanup: popping token 'a' ('a') -DESTROY 'a' -./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -348. actions.at:1429: ok - -359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... -./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:33.3-23: error: unset value: $$ [-Werror=other] - 33 | { @$ = 4; } // Only used. - | ^~~~~~~~~~~~~~~~~~~~~ -input.y:32.3-23: error: unused value: $3 [-Werror=other] - 32 | { USE ($$); @$ = 3; } // Only set. - | ^~~~~~~~~~~~~~~~~~~~~ -./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror -stderr: -stdout: -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -309. headers.at:191: ok - -358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... 
-stderr: -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -360. actions.at:1918: testing YYBACKUP ... -./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -353. actions.at:1745: ok -stderr: -stdout: - -349. actions.at:1532: ok - -361. types.at:25: testing %union vs. api.value.type ... -./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -362. types.at:44: testing %yacc vs. api.value.type=union ... -./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -362. types.at:44: ok -361. types.at:25: ok - -stderr: -input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] -input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] - -./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error -363. types.at:139: testing yacc.c api.value.type={double} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -364. types.at:139: testing yacc.c api.value.type={double} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none -./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS -stderr: -stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: -stdout: -307. 
headers.at:188: ok - -stderr: -stdout: -./actions.at:1053: $PREPARSER ./input '(x)' -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) Freeing token END (3@30-39) -Freeing nterm input (2@0-29) +Freeing nterm input (5@0-29) Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -365. types.at:139: testing yacc.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1053: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) @@ -10461,7 +10346,23 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +337. actions.at:1050: ./actions.at:1054: $PREPARSER ./input '(y)' + ok +stderr: +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. ./actions.at:1053: $PREPARSER ./input '!!!' +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -10473,9 +10374,48 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' ./actions.at:1053: $PREPARSER ./input '(y)' stderr: sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. 
+./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' Freeing token 'y' (1@10-19) @@ -10489,6 +10429,7 @@ Successful parse. ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1054: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10525,9 +10466,32 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. +stderr: ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +345. actions.at:1071: testing Default tagless %printer and %destructor ... +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1053: $PREPARSER ./input '(x)(x)x' +340. actions.at:1054: ok stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10549,240 +10513,22 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -339. actions.at:1053: ok - -366. types.at:139: testing yacc.c api.value.type={variant} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./actions.at:1907: $PREPARSER ./input -stderr: -'b' destructor -'a' destructor -./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -359. actions.at:1863: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -367. types.at:139: testing yacc.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./actions.at:1657: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 30): --> $$ = nterm $@1 (: ) -Entering state 2 -Stack now 0 2 -Reducing stack by rule 2 (line 31): --> $$ = nterm @2 (: 2) -Entering state 4 -Stack now 0 2 4 -Reducing stack by rule 3 (line 32): --> $$ = nterm @3 (: 3) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 4 (line 33): --> $$ = nterm @4 (: 4) -Entering state 6 -Stack now 0 2 4 5 6 -Reading a token -Now at end of input. 
-syntax error -Error: popping nterm @4 (: 4) -DESTROY 4 -Stack now 0 2 4 5 -Error: popping nterm @3 (: 3) -DESTROY 3 -Stack now 0 2 4 -Error: popping nterm @2 (: 2) -DESTROY 2 -Stack now 0 2 -Error: popping nterm $@1 (: ) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (: ) -Stack now 0 -./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -350. actions.at:1596: ok - -368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -366. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -363. types.at:139: ok - - -369. types.at:139: testing yacc.c api.value.type={struct bar} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -365. types.at:139: ok - -371. types.at:139: testing yacc.c api.value.type={union foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -370. types.at:139: ok - -372. types.at:139: testing yacc.c api.value.type={union foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Now at end of input. 
-Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -./actions.at:1955: $PREPARSER ./input -stderr: -./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -354. actions.at:1856: ok -360. actions.at:1918: ok - - -373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -367. types.at:139: ok -375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -371. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -368. types.at:139: ok +339. actions.at:1053: ok -376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./actions.at:1417: $PREPARSER ./input1 --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <*> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. -Shifting token END (1.1: <*> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <*> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) -./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... 
+./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror stderr: -347. actions.at:1307: ok stdout: -stderr: ./actions.at:1057: $PREPARSER ./input '(x)' stderr: -stdout: -./actions.at:1056: $PREPARSER ./input '(x)' - -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:22.3-4: error: useless %printer for type <> [-Werror=other] stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10796,23 +10542,8 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./actions.at:1057: $PREPARSER ./input '!' -./actions.at:1056: $PREPARSER ./input '!' -./headers.at:324: echo "x5" >>expout stderr: +./actions.at:1057: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -10822,31 +10553,55 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example . c A A $end + First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] + Second example . c A A $end + Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 20.000000 + First example b . 
c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example b . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example c . c A A $end + First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example c . A $end + Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . c A A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example b c . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] + Second example b c . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example b d . + First reduce derivation a -> [ b d . ] + Second reduce derivation a -> [ b d -> [ d . ] ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example c d . + First reduce derivation a -> [ c d . ] + Second reduce derivation a -> [ c d -> [ d . ] ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] ./actions.at:1057: $PREPARSER ./input '!!!' -./actions.at:1056: $PREPARSER ./input '!!!' stderr: -377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10856,25 +10611,10 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1056: $PREPARSER ./input '(y)' -stderr: -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. 
-./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] ./actions.at:1057: $PREPARSER ./input '(y)' -./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10888,49 +10628,11 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -stderr: -378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1056: $PREPARSER ./input '(x)(x)x' ./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10967,28 +10669,7 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -341. actions.at:1056: ok ./actions.at:1057: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) @@ -11011,120 +10692,34 @@ Freeing token END (7@70-79) Parsing FAILED. 
./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -379. types.at:139: testing yacc.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c 342. actions.at:1057: ok -stderr: -stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -stdout: -stdout: - -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -373. types.at:139: 355. actions.at:1856: ok - ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -372. types.at:139: ok -stderr: -stdout: -./types.at:139: $PREPARSER ./test -380. types.at:139: testing yacc.c api.value.type=union %header ... - - -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -375. types.at:139: ok - -382. types.at:139: testing glr.c api.value.type={double} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -381. types.at:139: testing glr.c api.value.type={double} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -383. types.at:139: testing glr.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -384. types.at:139: testing glr.c api.value.type={variant} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stdout: -./types.at:139: $PREPARSER ./test +348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -364. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - +input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] +input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] +./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stdout: -./types.at:139: $PREPARSER ./test +./actions.at:478: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -385. types.at:139: testing glr.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +333. 
actions.at:478: ok stderr: stdout: -./actions.at:1054: $PREPARSER ./input '(x)' -376. types.at:139: ok +./actions.at:1056: $PREPARSER ./input '(x)' +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -11137,9 +10732,9 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '!' +./actions.at:1056: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -11148,11 +10743,9 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '!!!' -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -386. types.at:139: testing glr.c api.value.type={struct foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1056: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -11163,9 +10756,12 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(y)' +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1056: $PREPARSER ./input '(y)' stderr: +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -11178,15 +10774,10 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. 
-./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stdout: +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11222,11 +10813,9 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -./actions.at:1054: $PREPARSER ./input '(x)(x)x' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1056: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -11247,542 +10836,437 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -369. types.at:139: ok -374. types.at:139: ok -340. actions.at:1054: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - - - -387. types.at:139: testing glr.c api.value.type={struct bar} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -388. types.at:139: testing glr.c api.value.type={struct bar} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -389. types.at:139: testing glr.c api.value.type={union foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stderr: -stdout: -./headers.at:325: echo "x6" >>expout -./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -377. types.at:139: ok -378. types.at:139: ok - - -./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -391. types.at:139: testing glr.c %union { float fval; int ival; }; ... 
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -390. types.at:139: testing glr.c api.value.type={union foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -379. types.at:139: ok +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +341. actions.at:1056: ok +./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test +input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:24.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +350. actions.at:1596: testing Default %printer and %destructor for midrule values ... +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -380. types.at:139: ok -392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] +input.y:33.3-23: error: unset value: $$ [-Werror=other] +input.y:32.3-23: error: unused value: $3 [-Werror=other] +./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +270. counterexample.at:610: ok +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:374: $PREPARSER ./input -stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -stderr: -1.1 -1.1: syntax error -./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1479: $PREPARSER ./input --debug stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) +Next token is token 'a' ('a') +Shifting token 'a' ('a') Entering state 1 Stack now 0 1 Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) +Next token is token 'b' ('b') +syntax error +Shifting token error () Entering state 3 Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token 'b' ('b') +Shifting token 'b' ('b') +Entering state 5 +Stack now 0 1 3 5 Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 +Next token is token "invalid token" () +Error: popping token 'b' ('b') +DESTROY 'b' +Stack now 0 1 3 +Error: popping token error () Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting 
token INT (ival: 20, fval: 0.2) +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Next token is token "invalid token" () +Error: discarding token "invalid token" () +Error: popping token error () +Stack now 0 1 +Shifting token error () Entering state 3 Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -356. actions.at:1856: ok -326. actions.at:374: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +Now at end of input. +Cleanup: discarding lookahead token "end of file" () +Stack now 0 1 3 +Cleanup: popping token error () +Cleanup: popping token 'a' ('a') +DESTROY 'a' +./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +351. actions.at:1743: testing @$ in %initial-action implies %locations ... +./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +348. actions.at:1429: ok +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +352. actions.at:1744: testing @$ in %destructor implies %locations ... +./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:1856: $PREPARSER ./input --debug -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +349. actions.at:1532: ok +stderr: +stdout: +./actions.at:1238: $PREPARSER ./input --debug stderr: + Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) +Next token is token 'a' (<*>//e printer) +Shifting token 'a' (<*>//e printer) Entering state 1 +Stack now 0 1 Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) +Next token is token 'b' ( printer) +Shifting token 'b' ( printer) Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. 
-Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -Starting parse -Entering state 0 +Stack now 0 1 3 Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 +Next token is token 'c' ('c' printer) +Shifting token 'c' ('c' printer) +Entering state 5 +Stack now 0 1 3 5 Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 +Next token is token 'd' ('d' printer) +Shifting token 'd' ('d' printer) +Entering state 6 +Stack now 0 1 3 5 6 +Reading a token +Next token is token 'e' (<*>//e printer) +Shifting token 'e' (<*>//e printer) +Entering state 7 +Stack now 0 1 3 5 6 7 +Reading a token +Next token is token 'f' (<*>//e printer) +Shifting token 'f' (<*>//e printer) +Entering state 8 +Stack now 0 1 3 5 6 7 8 Reading a token Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -stderr: -./actions.at:1856: sed -ne '/ival:/p' stderr -357. actions.at:1856: ok - -stdout: -./actions.at:478: $PREPARSER ./input -396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./headers.at:326: echo "x7" >>expout -./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c -333. actions.at:478: ok - -397. types.at:139: testing glr.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: . c A A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 4: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: c . 
c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 9.000000 - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . c A A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: b c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: b d . - First reduce derivation - a - `-> 1: b d . - Second reduce derivation - a - `-> 1: b d - `-> 7: d . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: c d . - First reduce derivation - a - `-> 2: c d . - Second reduce derivation - a - `-> 2: c d - `-> 7: d . -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +syntax error, unexpected end of file, expecting 'g' +Error: popping token 'f' (<*>//e printer) +Stack now 0 1 3 5 6 7 +Error: popping token 'e' (<*>//e printer) +Stack now 0 1 3 5 6 +Error: popping token 'd' ('d' printer) +Stack now 0 1 3 5 +Error: popping token 'c' ('c' printer) +Stack now 0 1 3 +Error: popping token 'b' ( printer) +Stack now 0 1 +Error: popping token 'a' (<*>//e printer) +Stack now 0 +Cleanup: discarding lookahead token "end of file" () +Stack now 0 +./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./headers.at:327: echo "x8" >>expout -382. types.at:139: 381. 
types.at:139: ./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y - ok - ok stderr: +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:33.3-23: error: unset value: $$ [-Werror=other] + 33 | { @$ = 4; } // Only used. + | ^~~~~~~~~~~~~~~~~~~~~ +input.y:32.3-23: error: unused value: $3 [-Werror=other] + 32 | { USE ($$); @$ = 3; } // Only set. + | ^~~~~~~~~~~~~~~~~~~~~ stdout: -./types.at:139: $PREPARSER ./test -stdout: -stderr: -./types.at:139: $PREPARSER ./test +./actions.at:1416: $PREPARSER ./input0 --debug stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -384. types.at:139: ok -383. types.at:139: ok +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <> for 'S' @ 1) +./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +346. actions.at:1174: ok +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +353. actions.at:1745: testing @$ in %printer implies %locations ... +./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y stderr: -398. types.at:139: testing glr.c api.value.type=union %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - stdout: -./types.at:139: $PREPARSER ./test - -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -399. types.at:139: testing lalr1.cc api.value.type={double} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc -400. types.at:139: testing lalr1.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -385. types.at:139: ok -401. types.at:139: testing lalr1.cc api.value.type={variant} ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./headers.at:328: echo "x9" >>expout +./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y stderr: - stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1121: $PREPARSER ./input --debug +354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -386. types.at:139: ok -402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token 'a' (1.1: <> printer for 'a' @ 1) +Shifting token 'a' (1.1: <> printer for 'a' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Entering state 3 +Stack now 0 1 3 +Reading a token +Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token 'd' (1.4: <> printer for 'd' @ 4) +Shifting token 'd' (1.4: <> printer for 'd' @ 4) +Entering state 6 +Stack now 0 1 3 5 6 +Reading a token +Now at end of input. +1.5: syntax error, unexpected end of file, expecting 'e' +Error: popping token 'd' (1.4: <> printer for 'd' @ 4) +Stack now 0 1 3 5 +Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Stack now 0 1 3 +Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Stack now 0 1 +Error: popping token 'a' (1.1: <> printer for 'a' @ 1) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (1.5: ) +Stack now 0 +./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +345. actions.at:1071: ok +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +355. actions.at:1856: testing Qualified $$ in actions: glr.c ... 
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc stderr: +input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] +input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -387. types.at:139: ok -388. types.at:139: ok -403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - - -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... -389. types.at:139: ok -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none +351. actions.at:1743: ok -406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -391. types.at:139: ok +352. 
actions.at:1744: ok +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none +356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -390. types.at:139: ok -407. types.at:139: testing lalr1.cc api.value.type={union foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test +353. actions.at:1745: ok +358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -392. types.at:139: ok -408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test - +./actions.at:1856: $PREPARSER ./input --debug stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -393. types.at:139: ok - -409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. 
+Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1856: sed -ne '/ival:/p' stderr +354. actions.at:1856: ok stderr: stdout: -./types.at:139: $PREPARSER ./test -stdout: -./types.at:139: $PREPARSER ./test -stderr: +./actions.at:1657: $PREPARSER ./input --debug stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -395. types.at:139: ok -394. types.at:139: ok +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 30): +-> $$ = nterm $@1 (: ) +Entering state 2 +Stack now 0 2 +Reducing stack by rule 2 (line 31): +-> $$ = nterm @2 (: 2) +Entering state 4 +Stack now 0 2 4 +Reducing stack by rule 3 (line 32): +-> $$ = nterm @3 (: 3) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 4 (line 33): +-> $$ = nterm @4 (: 4) +Entering state 6 +Stack now 0 2 4 5 6 +Reading a token +Now at end of input. +syntax error +Error: popping nterm @4 (: 4) +DESTROY 4 +Stack now 0 2 4 5 +Error: popping nterm @3 (: 3) +DESTROY 3 +Stack now 0 2 4 +Error: popping nterm @2 (: 2) +DESTROY 2 +Stack now 0 2 +Error: popping nterm $@1 (: ) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (: ) +Stack now 0 +./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +350. actions.at:1596: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -411. types.at:139: testing lalr1.cc api.value.type=union ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -412. types.at:139: testing lalr1.cc api.value.type=union %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... 
+./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +360. actions.at:1918: testing YYBACKUP ... +./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test +./actions.at:1417: $PREPARSER ./input1 --debug stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -396. types.at:139: ok +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <*> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <*> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <*> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) +./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +347. actions.at:1307: ok -413. types.at:139: testing lalr1.cc api.value.type=variant ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +361. types.at:25: testing %union vs. api.value.type ... +./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./actions.at:1907: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -397. types.at:139: ok +'b' destructor +'a' destructor +./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +359. actions.at:1863: ok +361. types.at:25: ok + + +362. types.at:44: testing %yacc vs. api.value.type=union ... +./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +363. types.at:139: testing yacc.c api.value.type={double} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +362. types.at:44: ok -414. types.at:139: testing lalr1.cc api.value.type=variant %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./actions.at:1856: $PREPARSER ./input --debug +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: Starting parse Entering state 0 @@ -11794,7 +11278,7 @@ Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Reducing stack 0 by rule 1 (line 55): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) @@ -11806,6 +11290,8 @@ Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +364. types.at:139: testing yacc.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: Starting parse Entering state 0 @@ -11817,7 +11303,7 @@ Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Reducing stack 0 by rule 1 (line 55): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) @@ -11829,103 +11315,133 @@ Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed -ne '/ival:/p' stderr +355. actions.at:1856: ok + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +365. types.at:139: testing yacc.c api.value.type={variant} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -398. types.at:139: ok +./actions.at:1955: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +360. actions.at:1918: ok +366. 
types.at:139: testing yacc.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: -./headers.at:328: echo "x9" >>expout -./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y -358. actions.at:1856: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ok -415. types.at:139: testing lalr1.cc api.value.type=variant ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -416. types.at:139: testing lalr1.cc api.value.type=variant %header ... +./headers.at:329: echo "xa" >>expout +./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +363. types.at:139: ok ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +365. types.at:139: + ok + stderr: stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +367. types.at:139: testing yacc.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... +364. 
types.at:139: ok +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./actions.at:1856: $PREPARSER ./input --debug +./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token "end of file" () +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token "end of file" () +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +369. types.at:139: testing yacc.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +356. actions.at:1856: ok + +370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: -./actions.at:1060: $PREPARSER ./input '(x)' +366. types.at:139: ok +./actions.at:1059: $PREPARSER ./input '(x)' +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -11938,8 +11454,12 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1060: $PREPARSER ./input '!' +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. types.at:139: testing yacc.c api.value.type={union foo} ... +./actions.at:1059: $PREPARSER ./input '!' +stderr: +stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -11948,9 +11468,56 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1060: $PREPARSER ./input '!!!' +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1856: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1059: $PREPARSER ./input '!!!' 
+stderr: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -11960,12 +11527,10 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./actions.at:1060: $PREPARSER ./input '(y)' -stdout: +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +357. actions.at:1856: ok +./actions.at:1059: $PREPARSER ./input '(y)' stderr: -./types.at:139: $PREPARSER ./test sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -11978,12 +11543,12 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -stderr: -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1060: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./actions.at:1059: $PREPARSER ./input '(xxxxx)(x)(x)y' +372. types.at:139: testing yacc.c api.value.type={union foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -12020,13 +11585,8 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./actions.at:1060: $PREPARSER ./input '(x)(x)x' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1059: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -12047,21 +11607,53 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +343. 
actions.at:1059: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: +./types.at:139: $PREPARSER ./test stderr: -./actions.at:1059: $PREPARSER ./input '(x)' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +367. types.at:139: stderr: + ok stdout: ./types.at:139: $PREPARSER ./test +373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +368. types.at:139: ok + +374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... +stderr: +stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +369. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +370. types.at:139: ok + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +./actions.at:1060: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -12074,8 +11666,10 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1059: $PREPARSER ./input '!' +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1060: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -12084,8 +11678,8 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1059: $PREPARSER ./input '!!!' +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1060: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -12096,13 +11690,8 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. 
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./actions.at:1059: $PREPARSER ./input '(y)' -stderr: +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1060: $PREPARSER ./input '(y)' stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -12116,13 +11705,15 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./actions.at:1059: $PREPARSER ./input '(xxxxx)(x)(x)y' -344. actions.at:1060: ok stderr: +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +./actions.at:1060: $PREPARSER ./input '(xxxxx)(x)(x)y' +stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -12158,8 +11749,9 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1059: $PREPARSER ./input '(x)(x)x' +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. types.at:139: ok +./actions.at:1060: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -12180,171 +11772,327 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +344. actions.at:1060: ok + +378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +379. types.at:139: testing yacc.c api.value.type=union ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -343. actions.at:1059: ok +372. 
types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +380. types.at:139: testing yacc.c api.value.type=union %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +373. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test + stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +374. types.at:139: ok +381. types.at:139: testing glr.c api.value.type={double} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + stderr: stdout: ./types.at:139: $PREPARSER ./test +382. types.at:139: testing glr.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +376. types.at:139: ok +375. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +377. types.at:139: ok stderr: +383. types.at:139: testing glr.c api.value.type={variant} ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: ./types.at:139: $PREPARSER ./test stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +384. types.at:139: testing glr.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +378. types.at:139: ok + +385. types.at:139: testing glr.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +386. types.at:139: testing glr.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +379. types.at:139: ok + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +387. types.at:139: testing glr.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +380. types.at:139: ok + +388. types.at:139: testing glr.c api.value.type={struct bar} %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: +./headers.at:330: echo "xb" >>expout +./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y +./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc +stderr: stdout: -./headers.at:329: echo "xa" >>expout ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test stderr: -./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +382. types.at:139: ok +383. types.at:139: ok + +stderr: + +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +390. types.at:139: testing glr.c api.value.type={union foo} %header ... +381. types.at:139: ok +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +389. types.at:139: testing glr.c api.value.type={union foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + stderr: +391. types.at:139: testing glr.c %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +384. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug +stderr: +stdout: ./types.at:139: $PREPARSER ./test +392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +387. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: + +386. types.at:139: ok +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr + +358. actions.at:1856: ok +393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... + +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +388. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +385. types.at:139: ok + +397. types.at:139: testing glr.c api.value.type=union ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +390. types.at:139: ok + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +391. types.at:139: ok +398. types.at:139: testing glr.c api.value.type=union %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +399. types.at:139: testing lalr1.cc api.value.type={double} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +389. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test + stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +392. types.at:139: ok +400. types.at:139: testing lalr1.cc api.value.type={double} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +401. types.at:139: testing lalr1.cc api.value.type={variant} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +394. types.at:139: ok +stderr: + +stdout: +./types.at:139: $PREPARSER ./test stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +393. types.at:139: ok + +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +395. types.at:139: ok + +404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +396. types.at:139: ok + +405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12355,6 +12103,9 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +397. types.at:139: ok + +406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12363,33 +12114,50 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +398. types.at:139: ok + +407. types.at:139: testing lalr1.cc api.value.type={union foo} ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./headers.at:331: echo "xc" >>expout +./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y +./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -12397,7 +12165,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12413,18 +12180,18 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: 
$PREPARSER ./test stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12437,60 +12204,62 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./headers.at:330: echo "xb" >>expout -./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12499,6 +12268,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12514,6 +12284,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12526,24 +12297,31 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12551,40 +12329,111 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:332: echo "xd" >>expout +./headers.at:342: "$PERL" -n -0777 -e ' + # Ignore comments. + s{/\*.*?\*/}{}gs; + s{//.*}{}g; + # Ignore warnings. + s{# *pragma .* message ".*"}{}g; + + s{\b((defined|if)\ YYDEBUG + |YYChar # Template parameter. + |YYNTOKENS # This is actually scoped in a C++ class. + |YYPUSH_MORE(?:_DEFINED)? + |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF)) # These guys are scoped. + |YY(?:_REINTERPRET)?_CAST + |YY_ATTRIBUTE(?:_PURE|_UNUSED) + |YY_CONSTEXPR + |YY_COPY + |YY_CPLUSPLUS + |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END) + |YY_INITIAL_VALUE + |YY_MOVE + |YY_MOVE_OR_COPY + |YY_MOVE_REF + |YY_NOEXCEPT + |YY_NOTHROW + |YY_NULLPTR + |YY_RVREF + |YY_USE + |YY_\w+_INCLUDED # Header guards. + |FILE\ \*yyo # Function argument. + |const\ yylocp # Function argument. 
+ )\b}{}gx; + while (/^(.*YY.*)$/gm) + { + print "$ARGV: invalid exported YY: $1\n"; + } + if ($ARGV =~ /\.h$/) + { + while (/^(.*yy.*)$/gm) + { + print "$ARGV: invalid exported yy: $1\n"; + } + } +' -- *.hh *.h +./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -stdout: stderr: +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || + exit 77 stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12593,7 +12442,12 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:387: $PREPARSER ./c-and-cxx +stderr: +./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12607,43 +12461,52 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: -./types.at:139: $PREPARSER ./test +./headers.at:394: $PREPARSER ./parser stderr: +./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12654,27 +12517,45 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX 
$CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +310. headers.at:199: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12690,6 +12571,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12700,7 +12582,6 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -12708,12 +12589,20 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12732,50 +12621,50 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: stderr: stderr: +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12788,67 +12677,22 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example . c A A $end - First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] - Second example . c A A $end - Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example b . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example b . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example c . c A A $end - First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example c . A $end - Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . c A A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example b c . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] - Second example b c . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example b d . - First reduce derivation a -> [ b d . ] - Second reduce derivation a -> [ b d -> [ d . ] ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example c d . - First reduce derivation a -> [ c d . ] - Second reduce derivation a -> [ c d -> [ d . 
] ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -12856,119 +12700,153 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -270. counterexample.at:610: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' -stdout: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +401. types.at:139: ok + +409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +399. types.at:139: ok + +410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +402. types.at:139: ok + +411. types.at:139: testing lalr1.cc api.value.type=union ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +400. types.at:139: ok + +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +412. types.at:139: testing lalr1.cc api.value.type=union %header ... +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: stderr: +404. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +403. types.at:139: ok + + +413. types.at:139: testing lalr1.cc api.value.type=variant ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +414. types.at:139: testing lalr1.cc api.value.type=variant %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./headers.at:331: echo "xc" >>expout -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y -./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +405. types.at:139: ok + +415. types.at:139: testing lalr1.cc api.value.type=variant ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +407. types.at:139: ok + +416. types.at:139: testing lalr1.cc api.value.type=variant %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +406. types.at:139: ok + +417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12989,47 +12867,54 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -13040,6 +12925,8 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13052,10 +12939,10 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13064,7 +12951,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13086,46 +12972,65 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check --std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++11 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13134,26 +13039,26 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== 
Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13166,7 +13071,6 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: stdout: @@ -13181,6 +13085,8 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13193,12 +13099,10 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13210,27 +13114,35 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13238,158 +13150,121 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -401. types.at:139: ok - -420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./headers.at:332: echo "xd" >>expout -./headers.at:342: "$PERL" -n -0777 -e ' - # Ignore comments. - s{/\*.*?\*/}{}gs; - s{//.*}{}g; - # Ignore warnings. - s{# *pragma .* message ".*"}{}g; +408. types.at:139: ok - s{\b((defined|if)\ YYDEBUG - |YYChar # Template parameter. - |YYNTOKENS # This is actually scoped in a C++ class. - |YYPUSH_MORE(?:_DEFINED)? - |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF)) # These guys are scoped. - |YY(?:_REINTERPRET)?_CAST - |YY_ATTRIBUTE(?:_PURE|_UNUSED) - |YY_CONSTEXPR - |YY_COPY - |YY_CPLUSPLUS - |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END) - |YY_INITIAL_VALUE - |YY_MOVE - |YY_MOVE_OR_COPY - |YY_MOVE_REF - |YY_NOEXCEPT - |YY_NOTHROW - |YY_NULLPTR - |YY_RVREF - |YY_USE - |YY_\w+_INCLUDED # Header guards. - |FILE\ \*yyo # Function argument. - |const\ yylocp # Function argument. - )\b}{}gx; - while (/^(.*YY.*)$/gm) - { - print "$ARGV: invalid exported YY: $1\n"; - } - if ($ARGV =~ /\.h$/) - { - while (/^(.*yy.*)$/gm) - { - print "$ARGV: invalid exported yy: $1\n"; - } - } -' -- *.hh *.h stderr: +418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... 
stdout: -./types.at:139: ./check +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -405. types.at:139: ok -stderr: -stdout: -./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc - -399. types.at:139: ok - -421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || - exit 77 +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... 
-======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./headers.at:387: $PREPARSER ./c-and-cxx +./types.at:139: $PREPARSER ./test stderr: -./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -402. types.at:139: ok - ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... -======== Testing with C++ standard flags: '' stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -400. types.at:139: ok - -424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... 
+./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13398,17 +13273,15 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -409. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -425. types.at:139: testing glr.cc api.value.type={double} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13421,28 +13294,33 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -407. 
types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test - stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -426. types.at:139: testing glr.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13450,61 +13328,95 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -410. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -427. types.at:139: testing glr.cc api.value.type={variant} ... ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -411. types.at:139: ok stderr: stdout: -./headers.at:394: $PREPARSER ./parser +./types.at:139: $PREPARSER ./test stderr: - -./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -428. types.at:139: testing glr.cc api.value.type={variant} %header ... 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -406. types.at:139: ok -404. types.at:139: ok - +409. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -310. headers.at:199: ok +419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: $PREPARSER ./test stderr: -429. types.at:139: testing glr.cc api.value.type={struct foo} ... +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +411. types.at:139: ok + +410. types.at:139: ok +420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... 
+======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -431. types.at:139: testing glr.cc api.value.type={struct bar} ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13512,22 +13424,41 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +412. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: @@ -13538,31 +13469,26 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -412. types.at:139: ok - -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: -432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: ./check -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -13571,39 +13497,20 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -403. types.at:139: ok - -408. types.at:139: ok -433. types.at:139: testing glr.cc api.value.type={union foo} ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -434. types.at:139: testing glr.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: @@ -13628,105 +13535,143 @@ stderr: stdout: ./types.at:139: ./check --std=c++98 not supported +-std=c++11 not supported +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stdout: +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check -std=c++11 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: +./types.at:139: ./check ./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' +414. 
types.at:139: ok + +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +417. types.at:139: ok +423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++98 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS --std=c++11 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++11 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' +413. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test + +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +425. types.at:139: testing glr.cc api.value.type={double} ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +415. types.at:139: ok stderr: stdout: + ./types.at:139: ./check -std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +426. types.at:139: testing glr.cc api.value.type={double} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: @@ -13734,9 +13679,12 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -13744,40 +13692,86 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; 
export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +416. types.at:139: ok + +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +427. types.at:139: testing glr.cc api.value.type={variant} ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: stdout: @@ -13787,26 +13781,29 @@ stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -417. types.at:139: ok - -418. types.at:139: ok -435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -13821,74 +13818,104 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -414. types.at:139: ok - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -437. types.at:139: testing glr.cc api.value.type=union ... +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +418. types.at:139: ok stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +428. types.at:139: testing glr.cc api.value.type={variant} %header ... 
+======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13896,17 +13923,31 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13914,37 +13955,44 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -413. types.at:139: ok ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -438. types.at:139: testing glr.cc api.value.type=union %header ... 
+./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -13952,24 +14000,14 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -13989,12 +14027,17 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check 
./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: @@ -14004,31 +14047,44 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +421. types.at:139: ok + +429. types.at:139: testing glr.cc api.value.type={struct foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +419. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14037,6 +14093,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14047,24 +14104,31 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: +stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +420. types.at:139: ok stderr: stdout: + ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +431. types.at:139: testing glr.cc api.value.type={struct bar} ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14073,19 +14137,24 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +422. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14093,83 +14162,62 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -415. types.at:139: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ok - +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -439. types.at:139: testing glr2.cc api.value.type={double} ... + ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -416. types.at:139: ok - -stderr: -440. types.at:139: testing glr2.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +424. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +433. types.at:139: testing glr.cc api.value.type={union foo} ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14178,11 +14226,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14201,51 +14244,63 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +423. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +434. types.at:139: testing glr.cc api.value.type={union foo} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -419. types.at:139: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -441. types.at:139: testing glr2.cc api.value.type={variant} ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14262,6 +14317,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14270,7 +14326,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14294,27 +14349,20 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14322,78 +14370,77 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +427. types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +425. types.at:139: stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14401,32 +14448,45 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +426. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +437. types.at:139: testing glr.cc api.value.type=union ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14450,18 +14510,18 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -14481,87 +14541,64 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: ./check ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -stderr: -./types.at:139: ./check -stdout: --std=c++98 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test 
test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++03 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14570,6 +14607,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -14577,113 +14615,127 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test 
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +428. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +430. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +438. types.at:139: testing glr.cc api.value.type=union %header ... +439. types.at:139: testing glr2.cc api.value.type={double} ... 
+======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +429. types.at:139: ok + +440. types.at:139: testing glr2.cc api.value.type={double} %header ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -420. types.at:139: ok - -442. types.at:139: testing glr2.cc api.value.type={variant} %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -421. types.at:139: ok - -443. types.at:139: testing glr2.cc api.value.type={struct foo} ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14691,51 +14743,76 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test 
test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -422. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -14745,58 +14822,77 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -423. types.at:139: ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - ok +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -424. types.at:139: ok - - -445. types.at:139: testing glr2.cc api.value.type={struct bar} ... +stdout: +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' -446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... 
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +433. types.at:139: ok + +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +432. types.at:139: ok +441. types.at:139: testing glr2.cc api.value.type={variant} ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +442. types.at:139: testing glr2.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +431. types.at:139: ok stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +443. types.at:139: testing glr2.cc api.value.type={struct foo} ... 
+======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14805,22 +14901,13 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14829,6 +14916,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14837,31 +14925,42 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard 
flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14874,49 +14973,111 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +434. types.at:139: stderr: +stdout: +./types.at:139: ./check + ok +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +435. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +445. types.at:139: testing glr2.cc api.value.type={struct bar} ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14924,27 +15085,51 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +436. types.at:139: ok + +446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -425. types.at:139: ok - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14952,44 +15137,80 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -447. types.at:139: testing glr2.cc api.value.type={union foo} ... 
+stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +437. types.at:139: ok + +447. types.at:139: testing glr2.cc api.value.type={union foo} ... 
======== Testing with C++ standard flags: '' +stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check @@ -14998,127 +15219,187 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -426. 
types.at:139: ok +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -427. types.at:139: ok -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: -449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... 
stdout: stdout: -======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -stderr: +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./types.at:139: ./check +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -430. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +438. types.at:139: ok + +stderr: stdout: ./types.at:139: ./check -stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - +448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: -429. types.at:139: stdout: - ok +stdout: +stdout: ./types.at:139: ./check +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -std=c++98 not supported ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test -450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... 
+stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' +stderr: +stderr: +stdout: +./types.at:139: ./check +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -451. types.at:139: testing glr2.cc api.value.type=union ... +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' -428. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: --std=c++03 not supported -======== Testing with C++ standard flags: '' ./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - stderr: stdout: ./types.at:139: ./check --std=c++98 not supported +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -452. types.at:139: testing glr2.cc api.value.type=union %header ... 
+stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -15128,141 +15409,177 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: ./check stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS 
$LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: stdout: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' +stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -431. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: stderr: +./types.at:139: ./check stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -std=c++03 not supported ======== Testing with C++ standard flags: '' -453. types.at:377: testing lalr1.cc: Named %union ... +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -453. types.at:377: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -454. types.at:377: testing glr.cc: Named %union ... 
-./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -454. types.at:377: ok - -455. scanner.at:326: testing Token numbers: yacc.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -432. types.at:139: ok - -456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -433. types.at:139: ok stderr: -434. types.at:139: stdout: - ok -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c - -./scanner.at:326: $PREPARSER ./input +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +439. types.at:139: ok stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -457. scanner.at:326: testing Token numbers: glr.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -455. scanner.at:326: ok -458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -459. scanner.at:326: testing Token numbers: lalr1.cc ... 
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -456. scanner.at:326: ok - +./types.at:139: $PREPARSER ./test stderr: -460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: ./types.at:139: $PREPARSER ./test stderr: -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: @@ -15284,64 +15601,59 @@ stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -435. 
types.at:139: stdout: - ok -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -457. scanner.at:326: ok - - +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c stderr: -461. scanner.at:326: testing Token numbers: glr.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $PREPARSER ./input -462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -458. scanner.at:326: ok - -463. scanner.at:326: testing Token numbers: glr2.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -436. types.at:139: ok -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +440. types.at:139: ok -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -437. 
types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15349,113 +15661,104 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -465. scanner.at:326: testing Token numbers: lalr1.d ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y -465. scanner.at:326: skipped (scanner.at:326) stderr: stdout: ./types.at:139: ./check - -std=c++98 not supported ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./scanner.at:326: $PREPARSER ./input -466. 
scanner.at:326: ./types.at:139: ./check - skipped (scanner.at:326) --std=c++03 not supported -======== Testing with C++ standard flags: '' +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: - stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +441. types.at:139: ok + +451. types.at:139: testing glr2.cc api.value.type=union ... ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -459. scanner.at:326: ok -467. scanner.at:326: testing Token numbers: lalr1.java ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y - stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ... -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y -460. scanner.at:326: stderr: - ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +443. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: + stdout: ./types.at:139: $PREPARSER ./test -467. scanner.at:326: stderr: - skipped (scanner.at:326) -438. types.at:139: ok +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ======== Testing with C++ standard flags: '' - +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +452. types.at:139: testing glr2.cc api.value.type=union %header ... 
+======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: ./check - ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ... -./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -468. scanner.at:326: skipped (scanner.at:326) - -470. calc.at:1334: testing Calculator parse.trace ... -./calc.at:1334: mv calc.y.tmp calc.y - -./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -471. calc.at:1336: testing Calculator %header ... -./calc.at:1336: mv calc.y.tmp calc.y - -./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -472. calc.at:1337: testing Calculator %debug %locations ... -./calc.at:1337: mv calc.y.tmp calc.y - -./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -15463,51 +15766,92 @@ stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++98 not supported +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +442. types.at:139: ok + +453. types.at:377: testing lalr1.cc: Named %union ... +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +453. types.at:377: ok + +454. types.at:377: testing glr.cc: Named %union ... +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +454. types.at:377: ok + +455. scanner.at:326: testing Token numbers: yacc.c ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: ./check --std=c++03 not supported +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: stderr: +stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: @@ -15518,20 +15862,27 @@ ======== Testing with C++ standard flags: '' stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: ./check -std=c++03 not supported @@ -15541,62 +15892,243 @@ stderr: stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +445. types.at:139: ok + +456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +455. scanner.at:326: ok +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +457. scanner.at:326: testing Token numbers: glr.c ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +456. scanner.at:326: ok + +458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +444. types.at:139: ok + +459. scanner.at:326: testing Token numbers: lalr1.cc ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +457. scanner.at:326: ok + +stderr: +460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +458. scanner.at:326: ok + stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +461. scanner.at:326: testing Token numbers: glr.cc ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +446. types.at:139: ok + +462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./calc.at:1336: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +459. scanner.at:326: ok +stderr: +stdout: +463. scanner.at:326: testing Token numbers: glr2.cc ... ./types.at:139: ./check +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ======== Testing with C++ standard flags: '' +./scanner.at:326: $PREPARSER ./input ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +460. scanner.at:326: ok + +464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -input: stdout: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1336: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +447. types.at:139: ok + +465. scanner.at:326: testing Token numbers: lalr1.d ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y +465. scanner.at:326: skipped (scanner.at:326) + +466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./scanner.at:326: $PREPARSER ./input stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +461. scanner.at:326: ok +466. scanner.at:326: skipped (scanner.at:326) + + +467. scanner.at:326: testing Token numbers: lalr1.java ... +468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +468. scanner.at:326: skipped (scanner.at:326) + +469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ... +./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +467. scanner.at:326: skipped (scanner.at:326) + +470. calc.at:1334: testing Calculator parse.trace ... +./calc.at:1334: mv calc.y.tmp calc.y + +./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +462. scanner.at:326: ok + +471. calc.at:1336: testing Calculator %header ... 
+./calc.at:1336: mv calc.y.tmp calc.y + +./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1334: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -15608,17 +16140,7 @@ || /\t/ )' calc.c -stderr: -stdout: -./types.at:139: ./check -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -15632,15 +16154,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./types.at:139: ./check ./calc.at:1334: $PREPARSER ./calc input -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | 1 2 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -16661,8 +17175,6 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -stderr: Starting parse Entering state 0 Stack now 0 @@ -17681,15 +18193,13 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 2 +./calc.at:1334: $PREPARSER ./calc input stderr: -stderr: -stdout: -./types.at:139: ./check stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -./calc.at:1337: "$PERL" -ne ' +./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1336: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -17698,10 +18208,84 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.c + )' calc.c calc.h +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1336: $PREPARSER ./calc input +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: | 1 2 -./calc.at:1334: $PREPARSER ./calc input +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +input: + | 1//2 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +input: + | error +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr stderr: Starting parse Entering state 0 @@ -17725,20 +18309,56 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 + | 1 = 2 = 3 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +input: | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1337: $PREPARSER ./calc input + | +1 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +syntax error +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1336: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +448. types.at:139: ok ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -17749,6 +18369,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: cat stderr + +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1336: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -17770,6 +18396,54 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +472. calc.at:1337: testing Calculator %debug %locations ... +./calc.at:1337: mv calc.y.tmp calc.y + +./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1334: cat stderr +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1334: $PREPARSER ./calc input +stderr: +syntax error +error: 2222 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -17779,126 +18453,2600 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.1: ) +Stack now 0 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by 
rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.1: ) +Stack now 0 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1334: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1334: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1336: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +./calc.at:1334: cat stderr +input: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1334: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1336: $PREPARSER ./calc input +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1334: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1336: $PREPARSER ./calc input +stderr: +input: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +stderr: +./calc.at:1334: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1336: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1334: cat stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1334: $PREPARSER ./calc /dev/null +stderr: +memory exhausted +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +memory exhausted +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1334: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 
3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack 
now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack 
now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' 
(1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +input: +./calc.at:1334: cat stderr + | (1 + #) = 1111 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1334: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stdout: +./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./scanner.at:330: $PREPARSER ./input +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stdout: +./calc.at:1336: cat stderr +./types.at:139: ./check +input: +./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1334: cat stderr +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +469. scanner.at:330: ok +stderr: +syntax error: invalid character: '#' +input: + + | (- *) + (1 2) = 1 +./calc.at:1334: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 
= nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" 
(1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1336: cat stderr +input: +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... +./calc.at:1338: mv calc.y.tmp calc.y + +./calc.at:1334: cat stderr +./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1336: cat stderr + | (* *) + (*) + (*) +./calc.at:1334: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 
(line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' 
(1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1336: $PREPARSER ./calc input +stderr: +error: null divisor +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1334: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1334: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' 
(1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' 
(1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +input: +stdout: + | 1 + 2 * 3 + !- ++ +./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +stderr: +./calc.at:1337: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +input: +471. calc.at:1336: ok + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) @@ -18789,6 +21937,8 @@ Cleanup: 
popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1334: cat stderr stderr: Starting parse Entering state 0 @@ -19808,36 +22958,13 @@ Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1336: cat stderr -stderr: -stdout: -./types.at:139: ./check -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: input: - | 1//2 + | 1 + 2 * 3 + !* ++ +./calc.at:1334: $PREPARSER ./calc input | 1 2 -./calc.at:1336: $PREPARSER ./calc input ./calc.at:1337: $PREPARSER ./calc input stderr: -stderr: -======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 @@ -19846,22 +22973,81 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -syntax error -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -19883,24 +23069,7 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 -stderr: -syntax error -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1334: cat stderr -input: - | 1//2 -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: $PREPARSER ./calc input -stderr: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -19915,21 +23084,95 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) +Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -19939,44 +23182,274 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr ./calc.at:1337: cat stderr +input: +474. calc.at:1340: testing Calculator %name-prefix "calc" ... +./calc.at:1340: mv calc.y.tmp calc.y + + | (#) + (#) = 2222 +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 
+Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 -./calc.at:1336: cat stderr +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1//2 ./calc.at:1337: $PREPARSER ./calc input -input: - | error stderr: -./calc.at:1336: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' 
(1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: Starting parse Entering state 0 Stack now 0 @@ -20006,19 +23479,7 @@ Stack now 0 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -20046,78 +23507,8 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -stderr: -syntax error -./calc.at:1334: cat stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1334: $PREPARSER ./calc input -stderr: -./calc.at:1337: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -input: - | error -./calc.at:1337: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20132,46 +23523,72 @@ s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; -./calc.at:1336: cat stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: - | 1 = 2 = 3 -./calc.at:1336: $PREPARSER ./calc input -stdout: +./scanner.at:326: $PREPARSER ./input ./calc.at:1334: cat stderr -stderr: -syntax error ./calc.at:1337: cat stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc input: - | 1 = 2 = 3 -./calc.at:1334: $PREPARSER ./calc input stderr: -syntax error + | (1 + #) = 1111 +./calc.at:1334: $PREPARSER ./calc input +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./scanner.at:326: $PREPARSER ./input - | 1 = 2 = 3 -stderr: -./calc.at:1337: $PREPARSER ./calc input stderr: + | error Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token @@ -20180,70 +23597,184 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +463. scanner.at:326: ok +stderr: +Starting parse +Entering state 0 Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; + +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20253,6 +23784,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: cat stderr +======== Testing with C++ standard flags: '' +./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 = 2 = 3 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1334: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -20261,37 +23802,41 @@ Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) Stack now 0 8 19 -Error: popping token '=' (1.1: ) +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1334: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -20332,56 +23877,86 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr -461. scanner.at:326: ./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ok -input: -./calc.at:1334: cat stderr - | - | +1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -input: -syntax error - -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: cat stderr - | - | +1 -stderr: -./calc.at:1334: $PREPARSER ./calc input -syntax error stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 @@ -20391,15 +23966,15 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20409,17 +23984,86 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 @@ -20429,16 +24073,32 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1337: cat stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +475. calc.at:1341: testing Calculator %verbose ... 
+input: +./calc.at:1341: mv calc.y.tmp calc.y + | | +1 ./calc.at:1337: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: Starting parse Entering state 0 @@ -20466,19 +24126,8 @@ Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1336: $PREPARSER ./calc /dev/null +./calc.at:1334: cat stderr Starting parse Entering state 0 Stack now 0 @@ -20504,89 +24153,11 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1334: cat stderr -473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... -./calc.at:1338: mv calc.y.tmp calc.y - -./calc.at:1334: $PREPARSER ./calc /dev/null -./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -./calc.at:1337: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -stderr: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -stderr: input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1334: $PREPARSER ./calc input stderr: +stdout: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20597,44 +24168,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: cat stderr -./calc.at:1334: cat stderr -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1337: $PREPARSER ./calc input -stderr: -./calc.at:1336: cat stderr -stdout: -stderr: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: Starting parse Entering state 0 @@ -20645,289 +24178,86 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is 
token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 4 12 Reading a token Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.1: ) -syntax error +syntax error: invalid character: '#' Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 4 11 Reading a token Next token is token "number" (1.1: 1) -Shifting token 
"number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Error: discarding token "number" (1.1: 1) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 +Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: 1111) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -20935,7 +24265,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -20953,7 +24283,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -20963,47 +24294,507 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 
-Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1337: cat stderr +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1337: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +input: + | (1 + 1) / (1 - 1) +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1337: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 
(line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1337: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) 
+Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./scanner.at:326: $PREPARSER ./input +stdout: +./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token Next token is token "number" (1.11: 1) Shifting token "number" (1.11: 1) @@ -21271,336 +25062,30 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: $PREPARSER ./calc input -./scanner.at:326: $PREPARSER ./input -stderr: -./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 
21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = 
token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +./calc.at:1338: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -21918,10 +25403,22 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -error: 2222 != 1 -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1338: $PREPARSER ./calc input ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -21932,41 +25429,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -462. scanner.at:326: ok -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: cat stderr +stderr: +464. scanner.at:326: ok +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: cat stderr +stderr: +./calc.at:1337: cat stderr +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -./calc.at:1336: cat stderr + | (!!) + (1 2) = 1 ./calc.at:1337: $PREPARSER ./calc input +470. calc.at:1334: ok input: -input: - | (!!) + (1 2) = 1 + | 1 2 stderr: -./calc.at:1334: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1336: $PREPARSER ./calc input - +./calc.at:1338: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -22109,12 +25587,8 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +1.3: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -22125,147 +25599,6 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token 
is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 @@ -22397,150 +25730,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error -syntax error -error: 2222 != 1 + stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +1.3: syntax error ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -22551,7 +25743,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22561,12 +25753,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr -./calc.at:1336: cat stderr +./calc.at:1338: cat stderr input: - | (- *) + (1 2) = 1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; +476. calc.at:1342: testing Calculator %yacc ... +./calc.at:1342: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1//2 +./calc.at:1338: $PREPARSER ./calc input +stderr: +./calc.at:1337: cat stderr +1.3: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +477. calc.at:1343: testing Calculator parse.error=detailed ... +stderr: +./calc.at:1343: mv calc.y.tmp calc.y + +1.3: syntax error +./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22576,8 +25787,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: input: + | (- *) + (1 2) = 1 +./calc.at:1337: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -22727,12 +25940,8 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -474. calc.at:1340: testing Calculator %name-prefix "calc" ... 
- | (* *) + (*) + (*) -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1340: mv calc.y.tmp calc.y - stderr: +./calc.at:1338: cat stderr Starting parse Entering state 0 Stack now 0 @@ -22881,13 +26090,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: cat stderr -syntax error -syntax error -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -22898,327 +26100,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error input: - | (- *) + (1 2) = 1 -./calc.at:1337: cat stderr -./calc.at:1334: $PREPARSER ./calc input + | error +./calc.at:1338: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is 
token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: cat stderr input: | (* *) + (*) + (*) -stderr: ./calc.at:1337: $PREPARSER ./calc input -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: 
popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) stderr: +1.1: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -23372,6 +26264,8 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.1: syntax error Starting parse Entering state 0 Stack now 0 @@ -23523,8 +26417,7 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1336: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -23534,7 +26427,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -23545,175 +26437,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1334: cat stderr -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1337: cat stderr -stderr: -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (* *) + (*) + (*) -./calc.at:1334: $PREPARSER ./calc input +./calc.at:1338: cat stderr +./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: -stderr: +input: + | 1 = 2 = 3 +./calc.at:1338: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ ./calc.at:1337: $PREPARSER ./calc input -Starting parse -Entering state 0 
-Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end 
of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: +1.7: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.7: syntax error Starting parse Entering state 0 Stack now 0 @@ -23794,161 +26533,7 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: - | 1 + 2 * 3 + !- ++ ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token 
'(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) stderr: Starting parse Entering state 0 @@ -24030,15 +26615,10 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24048,6 +26628,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1337: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -24130,17 +26714,19 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1338: cat stderr Starting parse Entering state 0 Stack now 0 @@ -24221,7 +26807,23 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1334: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1340: $PREPARSER ./calc input +stderr: +input: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24232,187 +26834,77 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1336: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1334: $PREPARSER ./calc input -input: -./calc.at:1337: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1336: $PREPARSER ./calc input + | + | +1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -memory exhausted +2.1: syntax error +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: +2.1: syntax error +input: +./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr + | 1 2 +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1338: cat stderr +stderr: +syntax error +./calc.at:1341: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr | 1 + 2 * 3 + !* ++ ./calc.at:1337: $PREPARSER ./calc input +input: +./calc.at:1338: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1341: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error stderr: stderr: -memory exhausted Starting parse Entering state 0 Stack now 0 @@ -24494,9 +26986,24 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: +1.1: syntax error +stderr: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 +./calc.at:1341: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -24578,9 +27085,10 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !- ++ -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1336: "$PERL" -pi -e 'use strict; +stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24590,88 +27098,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = 
nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24683,92 +27109,13 @@ }eg ' expout || exit 77 stderr: -./calc.at:1336: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) +syntax error +./calc.at:1340: cat stderr ./calc.at:1337: cat stderr +./calc.at:1338: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24779,16 +27126,18 @@ }eg ' expout || exit 77 input: +./calc.at:1340: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +syntax error +input: | (#) + (#) = 2222 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: $PREPARSER ./calc input -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +syntax error + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1338: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -24911,8 +27260,25 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1341: cat stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -25034,8 +27400,17 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: input: -./calc.at:1336: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | 1//2 +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1340: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25045,8 +27420,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1337: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25056,185 +27431,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token 
-Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) +syntax error ./calc.at:1337: cat stderr + | error +./calc.at:1340: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr input: -input: - | (1 + #) = 1111 -./calc.at:1336: $PREPARSER ./calc input | (1 + #) = 1111 -./calc.at:1337: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1337: $PREPARSER ./calc input +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25244,6 +27458,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error + | (!!) + (1 2) = 1 +./calc.at:1338: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -25344,8 +27562,11 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1341: cat stderr Starting parse Entering state 0 Stack now 0 @@ -25444,7 +27665,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1334: cat stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +input: + | error +./calc.at:1341: $PREPARSER ./calc input ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -25455,7 +27681,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25465,265 +27692,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1337: cat stderr +syntax error +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1336: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1337: cat stderr +./calc.at:1340: cat stderr +./calc.at:1338: cat stderr input: -stderr: input: +input: + | 1 = 2 = 3 +./calc.at:1340: $PREPARSER ./calc input | (# + 1) = 1111 -./calc.at:1336: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | (# + 1) = 1111 -stderr: ./calc.at:1337: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -25820,9 +27830,10 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -25919,7 +27930,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1334: "$PERL" -pi -e 'use strict; +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +syntax error +./calc.at:1341: cat stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25929,7 +27946,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25939,7 +27956,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; +input: + | 1 = 2 = 3 +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25949,120 +27969,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: cat stderr -input: -./calc.at:1336: cat stderr -./calc.at:1337: cat stderr - | (1 + #) = 1111 -./calc.at:1334: $PREPARSER ./calc input stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +stderr: +stdout: +./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +./calc.at:1337: cat stderr input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 
-Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1337: $PREPARSER ./calc input +syntax error +./calc.at:1340: cat stderr + | (* *) + (*) + (*) +input: +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + input: | (1 + # + 1) = 1111 -./calc.at:1336: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input + | + | +1 stderr: stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -26177,109 +28120,26 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./calc.at:1340: $PREPARSER ./calc input ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: +./calc.at:1342: $PREPARSER ./calc input +syntax error Starting parse Entering state 0 Stack now 0 @@ -26394,12 +28254,10 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' stderr: -stdout: -./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26409,6 +28267,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26419,18 +28284,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: cat stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26440,132 +28306,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: ./calc.at:1337: cat stderr -./calc.at:1334: cat stderr + | 1 2 +./calc.at:1342: $PREPARSER ./calc input input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 +./calc.at:1338: cat stderr +stderr: +./calc.at:1340: cat stderr +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1338: $PREPARSER ./calc input + | +1 +./calc.at:1341: $PREPARSER ./calc input input: -./calc.at:1336: cat stderr +stderr: +./calc.at:1340: $PREPARSER ./calc /dev/null +stderr: input: +syntax error +syntax error + | 1 + 2 * 3 + !+ ++ stderr: - | (1 + 1) / (1 - 1) - | (# + 1) = 1111 -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error + | (1 + 1) / (1 - 1) stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) 
-Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +./calc.at:1337: $PREPARSER ./calc input +stderr: +syntax error ./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -26707,111 +28484,22 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1336: $PREPARSER ./calc input -stderr: +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1338: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering 
state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) stderr: -error: null divisor -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Stack now 0 @@ -26953,14 +28641,7 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 2 -stderr: -./calc.at:1338: $PREPARSER ./calc input -stderr: -error: null divisor -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26970,7 +28651,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26981,8 +28663,7 @@ }eg ' expout || exit 77 stderr: -1.3: syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26992,7 +28673,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr +./calc.at:1342: cat stderr ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27003,255 +28684,174 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: cat stderr +./calc.at:1340: cat stderr +stderr: +stdout: +./calc.at:1341: cat stderr +./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: - | (1 + # + 1) = 1111 +./calc.at:1337: cat stderr +input: +./calc.at:1341: $PREPARSER ./calc /dev/null + | 1//2 +./calc.at:1342: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1340: $PREPARSER ./calc input +stderr: +./calc.at:1343: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: 472. 
calc.at:1337: ok -./calc.at:1334: $PREPARSER ./calc input -./calc.at:1336: cat stderr +syntax error +syntax error ./calc.at:1338: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1343: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -471. 
calc.at:1336: ok - | 1//2 +input: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !* ++ ./calc.at:1338: $PREPARSER ./calc input stderr: -1.3: syntax error +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1341: cat stderr +stderr: +1.14: memory exhausted ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1343: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1342: cat stderr +syntax error, unexpected number +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1340: cat stderr +stderr: +1.14: memory exhausted + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1341: $PREPARSER ./calc input +stderr: +stderr: +syntax error, unexpected number +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | error +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (!!) + (1 2) = 1 +syntax error +./calc.at:1340: $PREPARSER ./calc input +478. calc.at:1344: testing Calculator parse.error=verbose ... +./calc.at:1338: cat stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27261,7 +28861,67 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error +error: 2222 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1344: mv calc.y.tmp calc.y +stderr: +stderr: +syntax error +error: 2222 != 1 +./calc.at:1341: cat stderr +input: +./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1341: $PREPARSER ./calc input + | 1//2 +./calc.at:1343: $PREPARSER ./calc input +stderr: +stderr: +syntax error +error: 2222 != 1 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +error: 2222 != 1 +./calc.at:1342: cat stderr +stderr: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27272,305 +28932,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1334: cat stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1340: cat stderr ./calc.at:1338: cat stderr +stderr: +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (1 + 1) / (1 - 1) -./calc.at:1334: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1342: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error +input: + | (- *) + (1 2) = 1 +./calc.at:1340: $PREPARSER ./calc input stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 ./calc.at:1338: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' 
(1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +======== Testing with C++ standard flags: '' +stderr: +./calc.at:1343: cat stderr stderr: +syntax error stderr: -1.1: syntax error +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.6: syntax error: invalid character: '#' +syntax error +syntax error +error: 2222 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 
-Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +input: +input: stderr: -1.1: syntax error -./calc.at:1334: "$PERL" -pi -e 'use strict; +stderr: + | (- *) + (1 2) = 1 + | error +./calc.at:1341: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1343: $PREPARSER ./calc input +syntax error +syntax error +error: 2222 != 1 +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27580,6 +29011,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27590,35 +29025,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -475. calc.at:1341: testing Calculator %verbose ... -./calc.at:1341: mv calc.y.tmp calc.y - -./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -476. calc.at:1342: testing Calculator %yacc ... -./calc.at:1342: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1334: cat stderr -./calc.at:1338: cat stderr -470. calc.at:1334: ok -input: - | 1 = 2 = 3 -./calc.at:1338: $PREPARSER ./calc input - -stderr: -1.7: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -stderr: -stdout: -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27628,79 +29035,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1340: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - +stderr: +stderr: +syntax error, unexpected invalid token +syntax error +syntax error +error: 2222 != 1 +./calc.at:1340: cat stderr ./calc.at:1338: cat stderr +./calc.at:1342: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 input: -./calc.at:1340: $PREPARSER ./calc input | | +1 -./calc.at:1338: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -2.1: syntax error -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -477. calc.at:1343: testing Calculator parse.error=detailed ... -./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1343: mv calc.y.tmp calc.y - -./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | 1 2 + | (* *) + (*) + (*) +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1340: $PREPARSER ./calc input -./calc.at:1338: cat stderr -stderr: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: $PREPARSER ./calc /dev/null -stderr: -syntax error -stderr: -1.1: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27710,7 +29061,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27720,33 +29071,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1338: cat stderr -input: -input: - | 1//2 -./calc.at:1340: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1338: $PREPARSER ./calc input stderr: +./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: syntax error +syntax error +syntax error ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +./calc.at:1343: cat stderr stderr: syntax error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1338: "$PERL" -pi -e 'use strict; +syntax error +syntax error +input: +input: +./calc.at:1341: cat stderr + | (# + 1) = 1111 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1340: "$PERL" 
-pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27756,7 +29102,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +stderr: +./calc.at:1343: $PREPARSER ./calc input +input: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27766,28 +29116,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: cat stderr -./calc.at:1340: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: - | error +1.2: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1340: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 +syntax error, unexpected '=' +stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1340: cat stderr stderr: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +stdout: +syntax error, unexpected '=' syntax error -./calc.at:1340: "$PERL" -pi -e 'use strict; +syntax error +syntax error +./calc.at:1342: cat stderr +./types.at:139: ./check +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27797,7 +29151,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1340: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1342: $PREPARSER ./calc /dev/null +stderr: +stderr: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27807,29 +29171,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1338: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1340: $PREPARSER ./calc input -stderr: -input: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1338: $PREPARSER ./calc input stderr: stderr: +./calc.at:1338: cat stderr syntax error -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27839,7 +29186,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: 
"$PERL" -pi -e 'use strict; +input: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1341: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27849,29 +29203,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1338: cat stderr +stderr: +./calc.at:1343: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1341: $PREPARSER ./calc input | | +1 -input: -./calc.at:1340: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input stderr: -syntax error stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -syntax error stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error +syntax error, unexpected '+' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1342: cat stderr +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected '+' +input: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27882,7 +29240,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27893,32 +29250,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./calc.at:1338: cat stderr -./calc.at:1340: cat stderr -./scanner.at:330: $PREPARSER ./input -./calc.at:1340: $PREPARSER ./calc /dev/null -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1338: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1342: $PREPARSER ./calc input stderr: syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: syntax error -469. 
scanner.at:330: stderr: - ok -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error +syntax error +error: 4444 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1338: $PREPARSER ./calc input - -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27928,27 +29270,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr stderr: ./calc.at:1340: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1340: $PREPARSER ./calc input -stderr: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error syntax error syntax error syntax error error: 4444 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1338: "$PERL" -pi -e 'use strict; +input: +./calc.at:1343: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27958,8 +29297,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1343: $PREPARSER ./calc /dev/null +stderr: +input: +stderr: +syntax error, unexpected end of file +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27969,22 +29315,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -478. calc.at:1344: testing Calculator parse.error=verbose ... -./calc.at:1344: mv calc.y.tmp calc.y - -input: +stderr: +syntax error, unexpected end of file | 1 + 2 * 3 + !* ++ -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1340: $PREPARSER ./calc input stderr: -1.14: memory exhausted -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: cat stderr stderr: -1.14: memory exhausted +./calc.at:1341: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +memory exhausted +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +stderr: input: - | (!!) + (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: cat stderr +memory exhausted ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27995,20 +29340,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !* ++ +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: stderr: -syntax error -error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1338: cat stderr +./calc.at:1343: cat stderr +memory exhausted +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1342: $PREPARSER ./calc input input: - | (#) + (#) = 2222 -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1338: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1343: $PREPARSER ./calc input ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28019,20 +29374,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +473. calc.at:1338: ok stderr: ./calc.at:1340: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 input: - | (- *) + (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input stderr: syntax error -syntax error error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28042,12 +29410,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1338: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28057,35 +29420,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -stdout: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -./calc.at:1340: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1341: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c -input: stderr: - | (* *) + (*) + (*) -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1338: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28095,53 +29439,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -stderr: input: -./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | (#) + (#) = 2222 ./calc.at:1341: $PREPARSER ./calc input -syntax error -syntax error -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: cat stderr -stderr: +./calc.at:1343: cat stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1342: cat stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error input: stderr: - | (# + 1) = 1111 -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1342: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: -1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | (!!) + (1 2) = 1 +./calc.at:1343: $PREPARSER ./calc input +input: ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28152,33 +29465,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 stderr: -1.2: syntax error: invalid character: '#' - | 1 2 + | (- *) + (1 2) = 1 +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: $PREPARSER ./calc input -stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error +syntax error +error: 2222 != 1 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28188,19 +29487,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: ./calc.at:1340: cat stderr +syntax error, unexpected number +error: 2222 != 1 +479. calc.at:1346: testing Calculator api.pure=full %locations ... 
+./calc.at:1346: mv calc.y.tmp calc.y + +./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: syntax error -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -input: -./calc.at:1338: cat stderr - | 1 2 -./calc.at:1342: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28210,24 +29508,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -syntax error -stderr: - | (1 + # + 1) = 1111 -./calc.at:1338: $PREPARSER ./calc input -stderr: -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1341: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1343: cat stderr ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28239,17 +29521,37 @@ }eg ' expout || exit 77 input: - | 1//2 -./calc.at:1341: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !- ++ +input: + | (1 + #) = 1111 ./calc.at:1340: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1343: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1341: $PREPARSER ./calc input stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1342: cat stderr +stderr: +syntax error: invalid character: '#' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +stderr: +syntax error: invalid character: '#' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +input: + | (* *) + (*) + (*) +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28259,15 +29561,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1342: $PREPARSER ./calc input stderr: syntax error -./calc.at:1342: cat stderr -./calc.at:1338: cat stderr -input: -input: - | (1 + 1) / (1 - 1) -./calc.at:1338: $PREPARSER ./calc input +syntax error +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28278,11 +29577,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1342: $PREPARSER ./calc input +./calc.at:1343: cat stderr stderr: -1.11-17: error: null divisor -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +input: + | (* *) + (*) + (*) +./calc.at:1343: $PREPARSER ./calc input ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28294,20 +29596,22 @@ }eg ' expout || exit 77 stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1340: cat stderr -1.11-17: error: null divisor stderr: -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' input: - | 1 + 2 * 3 + !* ++ + | (# + 1) = 1111 ./calc.at:1340: $PREPARSER ./calc input -stderr: ./calc.at:1341: cat stderr -memory exhausted -./calc.at:1338: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28319,7 +29623,10 @@ ' expout || exit 77 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1342: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1341: $PREPARSER ./calc input +stderr: +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28329,36 +29636,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -stderr: -memory exhausted -./calc.at:1338: cat stderr -./calc.at:1341: $PREPARSER ./calc input +syntax error: invalid character: '#' stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1343: cat stderr ./calc.at:1342: cat stderr -473. 
calc.at:1338: ok -stderr: -syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error -./calc.at:1342: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: $PREPARSER ./calc input +syntax error: invalid character: '#' +input: + | 1 + 2 * 3 + !+ ++ stderr: -syntax error -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28369,8 +29661,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1340: cat stderr +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28380,26 +29681,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: cat stderr -./calc.at:1340: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1342: $PREPARSER ./calc input input: input: - | 1 = 2 = 3 -./calc.at:1341: $PREPARSER ./calc input - | (#) + (#) = 2222 + | (1 + # + 1) = 1111 + | 1 + 2 * 3 + !- ++ ./calc.at:1340: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input stderr: stderr: -syntax error -syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr syntax error: invalid character: '#' +stderr: ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -syntax error: invalid character: '#' syntax error: invalid character: '#' stderr: -syntax error +input: + | (1 + # + 1) = 1111 +./calc.at:1341: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28410,9 +29717,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: cat stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28422,27 +29739,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: -stdout: -479. calc.at:1346: testing Calculator api.pure=full %locations ... - | 1 = 2 = 3 -./calc.at:1342: $PREPARSER ./calc input stderr: +stdout: +./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1343: cat stderr +./calc.at:1342: cat stderr +./calc.at:1340: cat stderr input: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: mv calc.y.tmp calc.y - -./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | (1 + #) = 1111 -stderr: -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1341: cat stderr -stderr: -syntax error -./calc.at:1343: "$PERL" -ne ' +./calc.at:1344: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -28453,15 +29757,39 @@ || /\t/ )' calc.c -syntax error: invalid character: '#' input: + | 1 + 2 * 3 + !* ++ +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1340: $PREPARSER ./calc input +input: +memory exhausted +stderr: + | 1 + 2 * 3 + !* ++ +error: null divisor +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1341: $PREPARSER ./calc input +stderr: +memory exhausted +./calc.at:1341: cat stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -syntax error +memory exhausted +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -28475,28 +29803,18 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -stderr: +error: null divisor +./calc.at:1344: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error +memory exhausted stderr: -./calc.at:1342: cat stderr -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +error: null divisor +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28506,11 +29824,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 2 -./calc.at:1343: $PREPARSER ./calc input -stderr: -input: ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28521,25 +29834,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected number -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1342: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -./calc.at:1341: cat stderr -stderr: -syntax error -./calc.at:1341: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1340: cat stderr -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28550,26 +29845,23 @@ }eg ' expout || exit 77 stderr: -input: - | (# + 1) = 1111 -syntax error -./calc.at:1340: $PREPARSER ./calc input +error: null divisor stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1343: cat stderr -syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1342: cat stderr +input: input: stderr: + | (#) + (#) = 2222 + | 1 2 +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1340: cat stderr +stderr: ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28580,23 +29872,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1343: $PREPARSER ./calc input -syntax error: invalid character: '#' stderr: -./calc.at:1342: cat stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $PREPARSER ./calc /dev/null +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +input: + | (#) + (#) = 2222 +474. calc.at:1340: ok +./calc.at:1342: $PREPARSER ./calc input +stderr: ./calc.at:1341: cat stderr stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected number +475. calc.at:1341: ok stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +syntax error: invalid character: '#' +syntax error: invalid character: '#' + +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28606,19 +29910,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: cat stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; + +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28628,7 +29921,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28638,58 +29931,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 ./calc.at:1343: cat stderr +./calc.at:1344: cat stderr +./calc.at:1342: cat stderr input: - | (1 + # + 1) = 1111 -./calc.at:1340: $PREPARSER ./calc input input: - | error + | (1 + #) = 1111 ./calc.at:1343: $PREPARSER ./calc input + | 1//2 +./calc.at:1344: $PREPARSER ./calc input +input: + | (1 + #) = 1111 stderr: -syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr -stderr: -syntax error, unexpected invalid token -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: $PREPARSER ./calc input stderr: -input: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1342: $PREPARSER ./calc input +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... stderr: -syntax error, unexpected invalid token -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +./calc.at:1347: mv calc.y.tmp calc.y + +syntax error: invalid character: '#' ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1341: cat stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +481. calc.at:1348: testing Calculator parse.error=detailed %locations ... +./calc.at:1348: mv calc.y.tmp calc.y + +./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28699,15 +29976,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1343: cat stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28717,7 +29986,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; +stderr: +stdout: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28727,38 +29998,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1342: cat stderr +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1343: cat stderr +./calc.at:1344: cat stderr input: -stderr: -syntax error -error: 2222 != 1 - | 1 = 2 = 3 +input: + | (# + 1) = 1111 ./calc.at:1343: $PREPARSER ./calc input stderr: -syntax error, unexpected '=' +syntax error: invalid character: '#' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '=' -./calc.at:1342: cat stderr -./calc.at:1340: cat stderr -input: - | (!!) + (1 2) = 1 input: +stderr: + | (# + 1) = 1111 ./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error: invalid character: '#' + | error +./calc.at:1344: $PREPARSER ./calc input stderr: - | (1 + 1) / (1 - 1) -./calc.at:1340: $PREPARSER ./calc input -syntax error -error: 2222 != 1 +stderr: +syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error, unexpected invalid token +syntax error: invalid character: '#' ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28769,23 +30037,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr -stderr: -stderr: -error: null divisor -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -error: 2222 != 1 ./calc.at:1343: cat stderr -stderr: -error: null divisor -input: - | (- *) + (1 2) = 1 -input: - | - | +1 -./calc.at:1343: $PREPARSER ./calc input ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28796,18 +30048,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected '+' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28817,25 +30058,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./types.at:139: $PREPARSER ./test -stderr: -syntax error, unexpected '+' -syntax error -syntax error -error: 2222 != 1 +./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: +./calc.at:1344: cat stderr ./calc.at:1342: cat stderr +./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (1 + # + 1) = 1111 +./calc.at:1343: $PREPARSER ./calc input +input: stderr: + | 1 = 2 = 3 +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (1 + # + 1) = 1111 +syntax error, unexpected '=' ./calc.at:1342: $PREPARSER ./calc input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: cat stderr +stderr: +syntax error, unexpected '=' +syntax error: invalid character: '#' ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28846,8 +30094,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -./calc.at:1341: "$PERL" -pi -e 
'use strict; +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28858,19 +30106,21 @@ }eg ' expout || exit 77 stderr: -syntax error -syntax error -error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -474. calc.at:1340: ok ./calc.at:1343: cat stderr -./calc.at:1341: cat stderr +syntax error: invalid character: '#' +./calc.at:1344: cat stderr input: -./calc.at:1343: $PREPARSER ./calc /dev/null - - | (* *) + (*) + (*) -./calc.at:1341: $PREPARSER ./calc input stderr: + | (1 + 1) / (1 - 1) +./calc.at:1343: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +error: null divisor +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28881,27 +30131,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of file -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected end of file -stderr: -./calc.at:1342: cat stderr -syntax error -syntax error -syntax error -stderr: -stdout: -input: -./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | (* *) + (*) + (*) -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -ne ' +./calc.at:1346: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -28913,25 +30145,13 @@ )' calc.c stderr: -syntax error -syntax error -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error, unexpected '+' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor stderr: -syntax error -syntax error -syntax error +syntax error, unexpected '+' input: +./calc.at:1342: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -28945,11 +30165,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1343: cat stderr -stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28959,12 +30176,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1343: $PREPARSER ./calc input stderr: -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1343: cat stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28974,36 +30190,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) +477. calc.at:1343: ok stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: mv calc.y.tmp calc.y - -input: -./calc.at:1342: cat stderr -./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1344: cat stderr +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 +error: null divisor +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | 1 2 -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc /dev/null stderr: -syntax error, unexpected number +syntax error, unexpected end of input ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -syntax error, unexpected number -./calc.at:1341: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1343: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +stderr: +stderr: + +syntax error, unexpected end of input +1.3: syntax error +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29013,11 +30236,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: $PREPARSER ./calc input -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -stderr: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29028,54 +30246,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -input: -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: $PREPARSER ./test -stderr: -stdout: -input: - | (!!) + (1 2) = 1 -./types.at:139: ./check ./calc.at:1344: cat stderr -./calc.at:1343: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: - | 1 + 2 * 3 + !+ ++ -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1341: $PREPARSER ./calc input +./calc.at:1346: cat stderr +./calc.at:1342: cat stderr input: - | 1 + 2 * 3 + !- ++ | 1//2 +./calc.at:1346: $PREPARSER ./calc input stderr: +1.3: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... +476. calc.at:1342: ok + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: ./calc.at:1344: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -error: 2222 != 1 +./calc.at:1350: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +1.3: syntax error stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -======== Testing with C++ standard flags: '' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29085,7 +30289,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: "$PERL" -pi -e 'use strict; + +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1346: cat stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29095,7 +30306,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; +input: + | error +./calc.at:1346: $PREPARSER ./calc input +stderr: +./calc.at:1344: cat stderr +1.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... +./calc.at:1351: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29106,67 +30337,106 @@ }eg ' expout || exit 77 stderr: -./calc.at:1344: cat stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -./calc.at:1342: cat stderr +stdout: +./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1346: cat stderr +./calc.at:1348: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1348: $PREPARSER ./calc input input: + | 1 = 2 = 3 +./calc.at:1346: $PREPARSER ./calc input stderr: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.7: syntax error +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error -./calc.at:1344: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1343: $PREPARSER ./calc input + | 1 2 +./calc.at:1348: $PREPARSER ./calc input +stderr: +1.7: syntax error +stderr: +1.3: syntax error, unexpected number +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1351: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1351: $PREPARSER ./calc input +stderr: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS input: + | 1 2 +./calc.at:1351: $PREPARSER ./calc input stderr: +1.3: syntax error, unexpected number +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected number error: 2222 != 1 - | 1 + 2 * 3 + !* ++ -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected invalid token ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: stderr: -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected number error: 2222 != 1 -syntax error, unexpected invalid token -memory exhausted -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -memory exhausted -./calc.at:1341: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected number +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29176,13 +30446,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -memory exhausted -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -stderr: -memory exhausted -./calc.at:1342: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1348: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29192,14 +30457,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1342: $PREPARSER ./calc input + | 1//2 +./calc.at:1348: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +1.3: syntax error, unexpected number +stderr: +stdout: +./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29209,36 +30481,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + input: - | (* *) + (*) + (*) -./calc.at:1343: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1351: cat stderr stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1//2 +./calc.at:1351: $PREPARSER ./calc input +input: stderr: -stdout: -./types.at:139: ./check + | 1 2 +./calc.at:1347: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
stderr: -./calc.at:1341: cat stderr -syntax error, unexpected '=' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.3: syntax error stderr: -syntax error, unexpected '=' -./calc.at:1342: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29248,15 +30538,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29266,8 +30548,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29278,8 +30558,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29290,44 +30569,41 @@ }eg ' expout || exit 77 ./calc.at:1344: cat stderr +./calc.at:1347: cat stderr +./calc.at:1348: cat stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1342: cat stderr + | error +./calc.at:1348: $PREPARSER ./calc input input: + | 1//2 +./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1341: cat stderr -stderr: +1.1: syntax error, unexpected invalid token +input: + | (- *) + (1 2) = 1 ./calc.at:1344: $PREPARSER ./calc input +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '+' -input: -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1342: $PREPARSER ./calc input -stderr: -syntax error, unexpected '+' -input: -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1343: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1341: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr stderr: -syntax error: invalid character: '#' +1.3: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +input: stderr: -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +1.1: syntax error, unexpected invalid token +./calc.at:1346: $PREPARSER ./calc input stderr: +input: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29338,13 +30614,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1344: cat stderr stderr: -stderr: -stdout: -./calc.at:1342: "$PERL" -pi -e 'use strict; +1.3: syntax error +input: + | (* *) + (*) + (*) +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29354,9 +30630,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: $PREPARSER ./calc /dev/null -./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1341: "$PERL" -pi -e 'use strict; +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29366,10 +30644,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +2.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected end of input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -ne ' +2.1: syntax error +stderr: +stdout: +./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1350: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -29378,11 +30660,9 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.c + )' calc.c calc.h -./calc.at:1342: cat stderr -stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29392,13 +30672,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected end of input -input: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1341: cat stderr - | (# + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1343: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -29412,10 +30687,25 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1350: $PREPARSER ./calc input stderr: -./calc.at:1346: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1347: cat stderr +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1351: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected invalid token stderr: +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1346: cat stderr + | error +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29426,44 +30716,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1341: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1344: cat stderr -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -memory exhausted -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -syntax error: invalid character: '#' stderr: +1.1: syntax error, unexpected invalid token +1.1: syntax error input: -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1344: $PREPARSER ./calc input -memory exhausted + | 1 2 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc /dev/null stderr: -syntax error: invalid character: '#' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1346: $PREPARSER ./calc input +./calc.at:1348: cat stderr +1.3: syntax error, unexpected number +1.1: syntax error +./calc.at:1344: cat stderr stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29473,27 +30743,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -stderr: -1.3: syntax error +1.1: syntax error ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1342: cat stderr stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected number +stderr: +input: +1.1: syntax error + | 1 = 2 = 3 +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29503,9 +30761,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error +./calc.at:1348: $PREPARSER ./calc input input: -./calc.at:1344: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1347: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29515,10 +30776,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1351: cat stderr +stderr: +input: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29528,48 +30791,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.7: syntax error, unexpected '=' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1351: $PREPARSER ./calc input input: -./calc.at:1344: cat stderr stderr: -./calc.at:1341: cat stderr - | (#) + (#) = 2222 -./calc.at:1343: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 = 2 = 3 stderr: - | (!!) 
+ (1 2) = 1 -input: -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1346: cat stderr -syntax error: invalid character: '#' +input: + | 1//2 +./calc.at:1350: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 +1.7: syntax error, unexpected '=' +1.7: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: $PREPARSER ./calc input -syntax error, unexpected number -error: 2222 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: +1.7: syntax error, unexpected '=' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1346: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error, unexpected '=' + | 1 + 2 * 3 + !- ++ +./calc.at:1344: $PREPARSER ./calc input +1.7: syntax error + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: stderr: -1.3: syntax error +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29579,8 +30857,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29591,10 +30868,7 @@ }eg ' expout || exit 77 stderr: -1.3: syntax error -./calc.at:1343: cat stderr -./calc.at:1342: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29604,7 +30878,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1348: cat stderr +./calc.at:1351: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29614,13 +30896,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr input: - | (1 + #) = 1111 -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1344: cat stderr + | + | +1 +./calc.at:1351: $PREPARSER ./calc input input: +./calc.at:1350: cat stderr stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +2.1: syntax error, unexpected '+' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr + | + | +1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29630,39 +30920,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1342: $PREPARSER ./calc input input: + | + | +1 stderr: +./calc.at:1347: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' - | (- *) + (1 2) = 1 -./calc.at:1344: $PREPARSER ./calc input -error: null divisor -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1344: cat stderr + | (!!) + (1 2) = 1 input: stderr: - | (1 + 1) / (1 - 1) -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input +2.1: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor +2.1: syntax error, unexpected '+' + | error +2.1: syntax error, unexpected '+' +./calc.at:1350: $PREPARSER ./calc input stderr: -./calc.at:1346: cat stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -error: null divisor -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error input: -./calc.at:1343: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected invalid token +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29672,12 +30963,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1346: $PREPARSER ./calc input -stderr: -error: null divisor stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29687,10 +30974,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr -1.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +memory exhausted +1.1: syntax error, unexpected invalid token +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29700,10 +30992,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: cat stderr stderr: -1.1: syntax error -input: -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1351: $PREPARSER ./calc /dev/null +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29713,12 +31005,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -./calc.at:1342: cat stderr - | (# + 1) = 1111 -./calc.at:1343: $PREPARSER ./calc input -input: -./calc.at:1346: "$PERL" -pi -e 'use strict; +memory exhausted +./calc.at:1348: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error, unexpected end of file +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29728,27 +31020,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr stderr: -syntax error: invalid character: '#' -476. calc.at:1342: ok -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1341: cat stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected end of file +1.1: syntax error, unexpected end of file +./calc.at:1350: cat stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $PREPARSER ./calc /dev/null +./calc.at:1346: cat stderr stderr: stderr: -syntax error: invalid character: '#' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -475. calc.at:1341: ok -./calc.at:1346: cat stderr - +1.1: syntax error, unexpected end of file +stderr: +1.1: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29759,7 +31045,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; +input: +input: + | 1 = 2 = 3 +./calc.at:1350: $PREPARSER ./calc input +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29769,22 +31062,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1343: cat stderr -./calc.at:1344: cat stderr - | 1 = 2 = 3 -./calc.at:1346: $PREPARSER ./calc input - -stderr: -1.7: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.7: syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1344: $PREPARSER ./calc input + | (- *) + (1 2) = 1 stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29794,44 +31075,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + # + 1) = 1111 -./calc.at:1343: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ +1.7: syntax error, unexpected '=' stderr: -syntax error: invalid character: '#' -./calc.at:1344: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr +./calc.at:1348: cat stderr +./calc.at:1351: cat stderr stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -481. calc.at:1348: testing Calculator parse.error=detailed %locations ... -./calc.at:1348: mv calc.y.tmp calc.y - -./calc.at:1346: cat stderr -./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... 
-./calc.at:1350: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.7: syntax error, unexpected '=' +======== Testing with C++ standard flags: '' input: - | - | +1 -./calc.at:1346: $PREPARSER ./calc input -stderr: -./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1343: "$PERL" -pi -e 'use strict; +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29841,12 +31108,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1343: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1348: $PREPARSER ./calc input +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (#) + (#) = 2222 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29856,7 +31123,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: "$PERL" -pi -e 'use strict; + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1351: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29866,29 +31141,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1343: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -memory exhausted +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -stderr: -./calc.at:1346: cat stderr -memory exhausted stderr: -stdout: -./types.at:139: $PREPARSER ./test +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +./calc.at:1347: cat stderr stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1350: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1346: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29898,8 +31181,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +./calc.at:1347: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29909,40 +31196,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr -======== Testing with C++ standard flags: '' -./calc.at:1346: $PREPARSER ./calc /dev/null -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1344: cat stderr -stderr: -1.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -1.1: syntax error - | (#) + (#) = 2222 -477. calc.at:1343: ok -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1348: cat stderr + | + | +1 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input stderr: -./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 stderr: -stdout: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' +input: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29953,47 +31227,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - -./calc.at:1344: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1347: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - + | (!!) 
+ (1 2) = 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1351: cat stderr stderr: +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1344: $PREPARSER ./calc input +stderr: +./calc.at:1344: cat stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +1.10: syntax error +1.16: syntax error +2.1: syntax error, unexpected '+' stderr: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 stderr: -syntax error: invalid character: '#' -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30004,23 +31263,7 @@ }eg ' expout || exit 77 input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1346: cat stderr -stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30030,40 +31273,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr + | (1 + #) = 1111 +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: $PREPARSER ./calc input +stdout: input: -stderr: | (!!) + (1 2) = 1 -./calc.at:1346: $PREPARSER ./calc input -483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... -./calc.at:1351: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -input: - | (# + 1) = 1111 -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test stderr: +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 syntax error: invalid character: '#' -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1350: cat stderr ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 stderr: -./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1347: cat stderr +./calc.at:1348: cat stderr +./calc.at:1350: $PREPARSER ./calc /dev/null stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of file +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1346: cat stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30073,7 +31333,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1347: $PREPARSER ./calc input +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.1: syntax error, unexpected end of file + | (- *) + (1 2) = 1 +stderr: +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1351: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30085,35 +31363,39 @@ }eg ' expout || exit 77 input: -./types.at:139: ./check - | 1 2 -./calc.at:1344: cat stderr -./calc.at:1347: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1346: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ stderr: input: -1.3: syntax error - | (1 + # + 1) = 1111 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 ./calc.at:1346: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | (- *) + (1 2) = 1 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1344: cat stderr +./calc.at:1350: cat stderr stderr: -syntax error: invalid character: '#' -stderr: -1.3: syntax error stderr: -1.4: syntax error -1.12: syntax error +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; +input: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30123,10 +31405,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30136,8 +31415,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +input: + | (# + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1348: cat stderr +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +input: +input: +stderr: +syntax error: invalid character: '#' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr + | 1 + 2 * 3 + !- ++ + | (* *) + (*) + (*) +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30147,29 +31455,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1346: $PREPARSER ./calc input stderr: -error: null divisor -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1346: cat stderr stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1351: cat stderr + | (- *) + (1 2) = 1 ./calc.at:1347: $PREPARSER ./calc input -error: null divisor stderr: -1.3: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.3: syntax error - | (* *) + (*) + (*) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -pi -e 'use strict; +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30179,7 +31488,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: "$PERL" -pi -e 'use strict; +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30189,29 +31499,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1347: cat stderr -478. calc.at:1344: ok -input: - | error + | (* *) + (*) + (*) +./calc.at:1351: $PREPARSER ./calc input +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1346: cat stderr stderr: -./calc.at:1347: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error +stdout: +./types.at:139: ./check stderr: - -1.1: syntax error +stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check stderr: -1.1: syntax error -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30221,10 +31525,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1344: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30234,30 +31542,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: $PREPARSER ./calc input -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -stderr: -484. calc.at:1353: testing Calculator %debug ... 
-./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1353: mv calc.y.tmp calc.y - -input: -./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | 1 = 2 = 3 -./calc.at:1347: $PREPARSER ./calc input +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 stderr: input: - | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !* ++ ./calc.at:1346: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.14: memory exhausted ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +input: +./calc.at:1350: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1348: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30267,17 +31570,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error -./calc.at:1346: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1346: $PREPARSER ./calc input stderr: -1.14: memory exhausted -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.14: memory exhausted -./calc.at:1347: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +input: +syntax error: invalid character: '#' +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30287,6 +31589,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1348: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1350: $PREPARSER ./calc input +stderr: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30297,27 +31606,84 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -./calc.at:1347: cat stderr +stderr: +stderr: input: - | - | +1 +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1351: cat stderr + | (* *) + (*) + (*) ./calc.at:1347: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: cat stderr +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 + 2 * 3 + !+ ++ +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1348: $PREPARSER ./calc input input: +stderr: | (#) + (#) = 2222 -./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: $PREPARSER ./calc input stderr: +./calc.at:1344: cat stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: + | (1 + 1) / (1 - 1) +./calc.at:1344: $PREPARSER ./calc input stderr: -2.1: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: + | 1 + 2 * 3 + !- ++ +error: null divisor +./calc.at:1351: $PREPARSER ./calc input 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30328,17 +31694,9 @@ }eg ' expout || exit 77 stderr: -2.1: syntax error -./calc.at:1346: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1346: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30348,9 +31706,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./calc.at:1350: cat stderr +error: null divisor ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30361,29 +31718,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./scanner.at:326: $PREPARSER ./input +./calc.at:1348: cat stderr ./calc.at:1347: cat stderr -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr input: -./calc.at:1347: $PREPARSER ./calc /dev/null - | (# + 1) 
= 1111 -./calc.at:1346: $PREPARSER ./calc input -stderr: -463. scanner.at:326: ok -1.1: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +input: +./calc.at:1346: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1350: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1348: $PREPARSER ./calc input stderr: -stdout: -1.2: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 stderr: -1.1: syntax error -./scanner.at:326: $PREPARSER ./input -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30393,14 +31748,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - -stderr: -1.2: syntax error: invalid character: '#' -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -464. scanner.at:326: ok -./calc.at:1347: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30410,26 +31758,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1347: $PREPARSER ./calc input stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1346: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.14: memory exhausted + | (1 + #) = 1111 +./calc.at:1346: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1351: cat stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30439,34 +31785,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -stderr: -1.6: syntax error: invalid character: '#' input: - | (!!) 
+ (1 2) = 1 -./calc.at:1347: $PREPARSER ./calc input +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1351: $PREPARSER ./calc input +1.14: memory exhausted stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1354: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.6: syntax error: invalid character: '#' +478. calc.at:1344: 1.14: memory exhausted +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +stderr: +1.14: memory exhausted +./calc.at:1350: cat stderr ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30477,17 +31810,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1355: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1347: "$PERL" -pi -e 'use strict; +input: +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30497,27 +31821,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (* *) + (*) + (*) +./calc.at:1350: $PREPARSER ./calc input input: - | (1 + 1) / (1 - 1) -./calc.at:1346: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1347: cat stderr +./calc.at:1346: cat stderr + | 1 + 2 * 3 + !- ++ + +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1348: cat stderr +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr +input: stderr: -1.11-17: error: null divisor -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1346: $PREPARSER ./calc input +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
stderr: -input: - | (- *) + (1 2) = 1 -1.11-17: error: null divisor -./calc.at:1347: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +input: ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1346: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30527,11 +31870,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1350: cat stderr +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1350: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30542,58 +31886,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -./calc.at:1348: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -479. calc.at:1346: ok -./calc.at:1347: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1348: $PREPARSER ./calc input -stderr: -input: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - | (* *) + (*) + (*) -./calc.at:1347: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error: invalid character: '#' stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -stdout: -./types.at:139: $PREPARSER ./test stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30603,24 +31905,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1347: cat stderr - | 1 2 -./calc.at:1348: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1347: cat stderr stderr: -1.3: syntax error, 
unexpected number +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !+ ++ -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1347: $PREPARSER ./calc input -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30631,34 +31926,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1348: cat stderr -input: -487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1357: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -input: - | 1 + 2 * 3 + !- ++ ./calc.at:1347: $PREPARSER ./calc input - | 1//2 -./calc.at:1348: $PREPARSER ./calc input stderr: +1.14: memory exhausted stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1346: cat stderr +input: + | (#) + (#) = 2222 +input: +./calc.at:1351: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1346: $PREPARSER ./calc input stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.14: memory exhausted +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30668,7 +31950,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1350: cat stderr +1.6: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30678,39 +31968,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: cat stderr -./calc.at:1347: cat stderr input: -input: - | error -stderr: | 1 + 2 * 3 + !* ++ -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1348: $PREPARSER ./calc input -stdout: +./calc.at:1350: $PREPARSER ./calc input stderr: -1.14: memory exhausted -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: 1.14: memory exhausted +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr stderr: -1.1: syntax error, unexpected invalid 
token -./calc.at:1350: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.14: memory exhausted +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30720,7 +31992,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; +484. calc.at:1353: testing Calculator %debug ... +./calc.at:1353: mv calc.y.tmp calc.y + +./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: + | (#) + (#) = 2222 +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30730,56 +32009,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr ./calc.at:1348: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1350: $PREPARSER ./calc input -input: - | (#) + (#) = 2222 stderr: -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | 1 = 2 = 3 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' + | (1 + #) = 1111 ./calc.at:1348: $PREPARSER ./calc input -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -stdout: -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: ./check -stderr: -1.7: syntax error, unexpected '=' +1.6: syntax error: invalid character: '#' ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: -stderr: - | 1 2 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.7: syntax error, unexpected '=' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected number -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1346: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30789,9 +32042,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1351: cat stderr +input: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30801,10 +32054,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 
77 -1.3: syntax error, unexpected number ./calc.at:1348: cat stderr -./calc.at:1347: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1350: cat stderr +stderr: +1.11-17: error: null divisor +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30814,31 +32071,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | - | +1 -./calc.at:1348: $PREPARSER ./calc input input: - | (1 + #) = 1111 -./calc.at:1347: $PREPARSER ./calc input + | (# + 1) = 1111 input: +./calc.at:1348: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1350: $PREPARSER ./calc input + | (1 + #) = 1111 stderr: -1.6: syntax error: invalid character: '#' stderr: -2.1: syntax error, unexpected '+' +./calc.at:1351: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.2: syntax error: invalid character: '#' ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1350: $PREPARSER ./calc input stderr: stderr: +1.11-17: error: null divisor 1.6: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr stderr: -2.1: syntax error, unexpected '+' -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30849,10 +32107,15 @@ }eg ' expout || exit 77 stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (1 + #) = 1111 +./calc.at:1347: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stdout: -./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30863,20 +32126,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1351: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: +1.6: syntax error: invalid character: '#' +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30887,48 +32148,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1347: $PREPARSER ./calc input -input: ./calc.at:1348: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $PREPARSER ./calc input -stderr: -./calc.at:1348: $PREPARSER ./calc /dev/null -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: ./calc.at:1350: cat stderr -stderr: -stdout: -stderr: -1.2: syntax error: invalid character: '#' -1.1: syntax error, unexpected end of file -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +./calc.at:1346: cat stderr +1.6: syntax error: invalid character: '#' input: - | error -stderr: -stderr: +input: + | (1 + # + 1) = 1111 + | (1 + #) = 1111 +./calc.at:1348: $PREPARSER ./calc input ./calc.at:1350: $PREPARSER ./calc input -1.1: syntax error, unexpected end of file -stderr: ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30939,12 +32169,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected invalid token +stderr: +1.6: syntax error: invalid character: '#' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr +479. 
calc.at:1346: ok +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' input: -1.1: syntax error, unexpected invalid token +./calc.at:1347: cat stderr + | (# + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input +stderr: +input: +1.2: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30955,12 +32199,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' - | 1 2 -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30971,15 +32209,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1348: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (# + 1) = 1111 +./calc.at:1347: $PREPARSER ./calc input + stderr: -1.3: syntax error, unexpected number -input: +stderr: +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' ./calc.at:1350: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1347: $PREPARSER ./calc input +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr stderr: ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -30991,34 +32230,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1348: $PREPARSER ./calc input input: -stderr: - | 1 = 2 = 3 -stderr: +1.2: syntax error: invalid character: '#' + | (# + 1) = 1111 ./calc.at:1350: $PREPARSER ./calc input -./calc.at:1351: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error, unexpected '=' +1.2: syntax error: invalid character: '#' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 input: +./calc.at:1351: cat stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31029,10 +32249,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1351: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1348: $PREPARSER ./calc input stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; +input: +1.2: syntax error: invalid character: '#' +stderr: +1.11-17: error: null divisor +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +stderr: +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1347: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31042,14 +32271,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error, unexpected '=' +1.11-17: error: null divisor stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +1.6: syntax error: invalid character: '#' ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr +stdout: +input: stderr: -./calc.at:1348: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1350: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1347: $PREPARSER ./calc input +./types.at:139: ./check +1.6: syntax error: invalid character: '#' +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31059,12 +32294,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1348: $PREPARSER ./calc input -input: stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +1.6: syntax error: invalid character: '#' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31075,30 +32308,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1350: cat stderr -./calc.at:1351: cat stderr -stderr: -1.11-17: error: null divisor -stderr: -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stdout: -input: -./types.at:139: $PREPARSER ./test stderr: -1.11-17: error: null divisor input: +./calc.at:1348: cat stderr +1.6: syntax error: invalid character: '#' + | (1 + # + 1) = 1111 +./calc.at:1350: $PREPARSER ./calc input stderr: - | - | +1 -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1351: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... 
+./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31108,18 +32329,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: $PREPARSER ./calc input - | error -./calc.at:1351: $PREPARSER ./calc input +./calc.at:1354: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + stderr: -1.1: syntax error, unexpected invalid token -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +481. calc.at:1348: ok +input: + | (1 + 1) / (1 - 1) +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: +1.11-17: error: null divisor ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error, unexpected '+' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31129,14 +32357,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr stderr: -1.1: syntax error, unexpected invalid token -stderr: -2.1: syntax error, unexpected '+' +1.11-17: error: null divisor + +./calc.at:1350: cat stderr input: -======== Testing with C++ standard flags: '' - | (- *) + (1 2) = 1 -./calc.at:1348: $PREPARSER ./calc input ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31147,12 +32373,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) +input: +./calc.at:1347: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: cat stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +stderr: +1.11-17: error: null divisor +483. calc.at:1351: ok ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31163,14 +32400,73 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1350: cat stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +482. calc.at:1350: ok ./calc.at:1347: cat stderr + +486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... 
+./calc.at:1355: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +480. calc.at:1347: ok +./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +./calc.at:1357: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + + +./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1358: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1360: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: stdout: -./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1350: cat stderr -./calc.at:1351: cat stderr -480. calc.at:1347: ok -./calc.at:1353: "$PERL" -ne ' +./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1355: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -31179,19 +32475,23 @@ || /\s$/ # No tabs. || /\t/ - )' calc.c + )' calc.c calc.h -./calc.at:1350: $PREPARSER ./calc /dev/null -input: -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -input: -stderr: -./calc.at:1351: $PREPARSER ./calc input stderr: +stdout: +./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1360: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -31205,1063 +32505,4230 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -stdout: -stderr: -1.1: syntax error, unexpected end of file -stderr: -./types.at:139: ./check -./calc.at:1353: $PREPARSER ./calc input -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.7: syntax error, unexpected '=' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1355: $PREPARSER ./calc input stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.7: syntax error, unexpected '=' -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr +stdout: +./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1353: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + stderr: -./calc.at:1348: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 7) -Shifting token "number" (1.1: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 7) --> $$ = nterm exp (1.1: 7) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 7) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 7) --> $$ = nterm exp (1.1: 7) +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' 
(1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 7) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: -3) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 -Next token is token '=' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: -3) --> $$ = nterm exp (1.1: -6) +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 Stack now 0 6 8 21 30 -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: -6) --> $$ = nterm exp (1.1: -5) +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering 
state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 5) -Shifting token "number" (1.1: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 5) --> $$ = nterm exp (1.1: 5) +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 5) --> $$ = nterm exp (1.1: -5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 28 Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -5) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -5) --> $$ = nterm exp (1.1: -5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -5) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp 
(4.2: 1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 1) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 28 Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -1) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -1) --> $$ = nterm exp (1.1: -1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -1) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 
92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: -1) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: -1) +Reducing stack by rule 13 (line 138): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: -1) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is 
token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: -1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.1: ) + $2 = nterm exp 
(7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: -1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 28 Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -1) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -1) --> $$ = nterm exp (1.1: -1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -1) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' 
(9.3: ) +Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '-' (1.1: ) -Reducing stack by rule 8 (line 99): +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 
4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) 
+Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 
12 (line 137): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: -1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is 
token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 
8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a 
token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: 
) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: -1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: -4) +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is 
token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 4) -Shifting token "number" (1.1: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 4) --> $$ = nterm exp (1.1: 4) +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 11 (line 116): - $1 = token '-' (1.1: ) - $2 = nterm exp (1.1: 4) --> $$ = nterm exp (1.1: -4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 28 Stack now 0 6 8 19 28 -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: -4) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: -4) --> $$ = nterm exp (1.1: -4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: -4) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering 
state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a 
token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = 
token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = 
nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is 
token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is 
token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token 
+Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 
101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 2) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: -1) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 
(line 119): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 Stack now 0 6 8 20 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: -1) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: -1) +Reducing stack by rule 13 (line 138): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 8 (line 99): +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 
+Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 137): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: 
) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: -1) --> $$ = nterm exp (1.1: 2) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: 
) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token 
'\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' 
(5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) 
+Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) 
Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering 
state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '^' (12.4: ) 
+Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 2) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 8) +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 33 Stack now 0 6 8 24 33 -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 2) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 8) --> $$ = nterm exp (1.1: 256) +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 256) -Shifting token "number" (1.1: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 256) --> $$ = nterm exp (1.1: 256) +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 256) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 256) --> $$ = nterm exp (1.1: 256) +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 256) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 81): - $1 = token 
"number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 2) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 4) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 4) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 4) +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 12 (line 117): - $1 = nterm exp (1.1: 4) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 64) +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 Stack now 0 6 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 64) -Shifting token "number" (1.1: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 64) --> $$ = nterm exp (1.1: 64) +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 64) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 64) --> $$ = nterm exp (1.1: 64) +Next token is 
token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 64) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -./calc.at:1351: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1355: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1360: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> 
$$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -32271,6 +36738,71 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1360: cat stderr +input: + | 1//2 +./calc.at:1355: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: + | 1//2 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -33289,43 +37821,17 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1350: $PREPARSER ./calc input -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1351: cat stderr -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 -./calc.at:1353: $PREPARSER ./calc input -input: -stderr: -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' - | - | +1 -./calc.at:1351: $PREPARSER ./calc input -stderr: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -33341,2093 +37847,998 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.1: 2) -Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error, unexpected '+' -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1358: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stderr: -./calc.at:1348: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.1: 2) -Stack now 0 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1350: cat stderr -./calc.at:1348: $PREPARSER ./calc input -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1351: cat stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1353: cat stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1351: $PREPARSER ./calc /dev/null -input: - | 1//2 -./calc.at:1353: $PREPARSER ./calc input -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1350: cat stderr -stderr: -stderr: -./types.at:139: ./check -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.1: ) -Stack now 0 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1350: $PREPARSER ./calc input -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr -./calc.at:1348: cat stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 - | 1 + 2 * 3 + !* ++ -./calc.at:1348: $PREPARSER ./calc input -stderr: -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -1.14: memory exhausted -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.14: memory exhausted -./calc.at:1353: cat stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1350: cat stderr -input: - | error -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -./calc.at:1350: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: cat stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: -./calc.at:1351: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1348: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1350: cat stderr -./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1351: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1353: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1348: cat stderr -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input - | 1 = 2 = 3 -stderr: -./calc.at:1353: $PREPARSER ./calc input -stderr: -input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1348: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token "number" (1.1: 7) +Shifting token "number" (1.1: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) + $1 = token "number" (1.1: 7) +-> $$ = nterm exp (1.1: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 7) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 7) +-> $$ = nterm exp (1.1: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) -Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 7) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 6 8 21 Reading a token Next token is token "number" (1.1: 2) Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 2) -> $$ = nterm exp (1.1: 2) -Entering state 28 -Stack now 0 8 19 28 +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Stack now 0 8 19 -Error: popping token '=' (1.1: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.1: ) -Stack now 0 -1.6: syntax error: invalid character: '#' -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1350: $PREPARSER ./calc input -stderr: -input: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1353: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr - | - | +1 -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: cat stderr -stderr: -./calc.at:1348: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: -3) +-> $$ = nterm exp (1.1: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: -6) +-> $$ = nterm exp (1.1: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (1.1: 5) +Shifting token "number" (1.1: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 5) +-> $$ = nterm exp (1.1: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 5) +-> $$ = nterm exp (1.1: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: -5) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: -5) +-> $$ = nterm exp (1.1: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 76): - $1 = token '\n' (1.1: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: -5) + $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line 
(1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 + !* ++ -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1351: $PREPARSER ./calc input -stderr: -input: -./calc.at:1350: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 3 -Stack now 0 3 +Stack now 0 6 3 Reducing stack by rule 3 (line 76): $1 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (1.1: ) -Stack now 0 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1348: $PREPARSER ./calc input -stderr: -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -stderr: -1.2: syntax error: invalid character: '#' -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1351: $PREPARSER ./calc input -stderr: -./calc.at:1353: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1353: $PREPARSER ./calc /dev/null -./calc.at:1348: cat stderr -./calc.at:1350: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 - | (1 + # + 1) = 1111 -./calc.at:1348: $PREPARSER ./calc input -input: -stderr: -1.6: syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1351: cat stderr -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.14: memory exhausted -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.14: memory exhausted -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1351: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1353: cat stderr -input: -./calc.at:1348: cat stderr - | (#) + (#) = 2222 -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -1.11-17: error: null divisor -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 2 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 2 10 24 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): +Next token is token '=' (1.1: ) +Reducing stack by rule 12 (line 117): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 
1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: -1) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: -1) +-> $$ = nterm exp (1.1: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: -1) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: 
popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 4 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: -1) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token Next token is token "number" (1.1: 2) Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 8 24 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 2) -> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Entering state 33 +Stack now 0 6 8 24 33 Reading a token Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) 
- $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: -1) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: 1) $2 = token '=' (1.1: ) $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: 1) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: -1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 
+Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: -1) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: -1) +-> $$ = nterm exp (1.1: -1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: -1) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) 
+Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token Next token is token "number" (1.1: 2) Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 2) -> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): +Next token is token '-' (1.1: ) +Reducing stack by rule 8 (line 99): $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) + $2 = token '-' (1.1: ) $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +-> $$ = nterm exp (1.1: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: -1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: -4) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next 
token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (1.1: 4) +Shifting token "number" (1.1: 4) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 4) +-> $$ = nterm exp (1.1: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (1.1: ) +Reducing stack by rule 11 (line 116): + $1 = token '-' (1.1: ) + $2 = nterm exp (1.1: 4) +-> $$ = nterm exp (1.1: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: -4) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) + $3 = nterm exp (1.1: -4) +-> $$ = nterm exp (1.1: -4) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) + $1 = nterm exp (1.1: -4) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1351: cat stderr -1.11-17: error: null divisor -input: -input: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1348: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -481. calc.at:1348: ok -./calc.at:1351: cat stderr -./calc.at:1350: cat stderr -input: -input: - | (# + 1) = 1111 - | (1 + # + 1) = 1111 - -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1351: $PREPARSER ./calc input -input: -stderr: -stderr: -1.2: syntax error: invalid character: '#' - | (!!) + (1 2) = 1 -./calc.at:1353: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -stdout: -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 20 4 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 2) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) + $2 = nterm exp (1.1: -1) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +-> $$ = nterm exp (1.1: -1) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: -1) +-> $$ = nterm exp (1.1: 2) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2) +Shifting 
token "number" (1.1: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 2) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 2) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr -stderr: -./calc.at:1350: cat stderr -======== Testing with C++ standard flags: '' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' 
(1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 24 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 8 24 33 24 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 Reading a token Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: 2) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (1.1: ) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: 2) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 8) +-> $$ = nterm exp (1.1: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 
-Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 256) +Shifting token "number" (1.1: 256) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 256) +-> $$ = nterm exp (1.1: 256) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 256) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 256) +-> $$ = nterm exp (1.1: 256) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 256) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -input: - | (1 + # + 1) = 1111 -./calc.at:1351: $PREPARSER ./calc input -input: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | (1 + 1) / (1 - 1) -./calc.at:1350: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -stderr: -1.11-17: error: null divisor -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1360: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr -./calc.at:1353: cat stderr -./calc.at:1350: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -input: -1.11-17: error: null divisor -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1353: $PREPARSER ./calc input -482. calc.at:1350: ok -stderr: -1.11-17: error: null divisor -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 6 4 12 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token Next token is token ')' (1.1: ) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: 2) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) + $2 = nterm exp (1.1: 4) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 6 8 24 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp 
(1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 12 (line 117): + $1 = nterm exp (1.1: 4) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 64) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 64) +Shifting token "number" (1.1: 64) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 64) +-> $$ = nterm exp (1.1: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 64) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 64) +-> $$ = nterm exp (1.1: 64) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 64) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) -> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 @@ -35439,183 +38850,158 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' 
(1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1355: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1355: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' 
+Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 stderr: stdout: -./calc.at:1353: cat stderr -./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1354: "$PERL" -ne ' +./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1358: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -35626,11 +39012,18 @@ || /\t/ )' calc.c calc.h -483. calc.at:1351: ok -input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr input: - | (* *) + (*) + (*) -./calc.at:1353: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -35644,333 +39037,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1354: $PREPARSER ./calc input -490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-stderr: -stdout: -stderr: -./types.at:139: ./check -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line 
(1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 
= token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1358: $PREPARSER ./calc input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -35984,11 +39052,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -35999,11 +39069,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -36014,9 +39086,10 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -36024,7 +39097,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -36035,6 +39108,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -36045,6 +39119,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -36069,6 +39144,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -36079,11 +39155,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -36094,16 +39172,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new 
token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -36114,16 +39195,17 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -36131,7 +39213,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -36142,11 +39224,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -36157,9 +39241,10 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -36189,6 +39274,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -36205,11 +39291,13 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -36220,11 +39308,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -36235,9 +39325,10 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -36245,7 +39336,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -36255,11 +39346,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: 
Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -36270,9 +39363,10 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -36302,16 +39396,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -36322,9 +39419,10 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -36334,18 +39432,20 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -36356,9 +39456,10 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -36369,6 +39470,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -36379,6 +39481,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -36404,6 +39507,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -36420,21 +39524,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -36445,23 +39553,24 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.3: ) $2 = nterm exp 
(7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -36471,11 +39580,13 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -36486,9 +39597,10 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -36518,6 +39630,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -36534,6 +39647,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -36544,11 +39658,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -36559,9 +39675,10 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -36572,6 +39689,7 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -36582,9 +39700,10 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -36595,11 +39714,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -36610,9 +39731,10 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -36642,6 +39764,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting 
token number (10.1: 1) @@ -36652,16 +39775,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -36672,11 +39798,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -36687,9 +39815,10 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -36700,16 +39829,17 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -36720,6 +39850,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -36730,6 +39861,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -36755,6 +39887,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -36771,6 +39904,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -36781,11 +39915,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -36796,11 +39932,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -36811,9 +39949,10 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp 
(12.5: 3) @@ -36821,7 +39960,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -36832,6 +39971,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -36842,6 +39982,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -36867,11 +40008,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -36882,11 +40025,13 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -36897,9 +40042,10 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -36910,18 +40056,20 @@ Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -36932,9 +40080,10 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -36945,6 +40094,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -36955,6 +40105,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -36980,6 +40131,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (14.1: ) @@ -36988,13 +40140,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -======== Testing with C++ standard flags: '' -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | + | +1 Starting parse Entering state 0 Stack now 0 @@ -37008,11 +40158,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -37023,11 +40175,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -37038,9 +40192,10 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -37048,7 +40203,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -37059,6 +40214,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -37069,6 +40225,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -37093,6 +40250,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -37103,11 +40261,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -37118,16 +40278,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -37138,16 +40301,17 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): 
$1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -37155,7 +40319,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -37166,11 +40330,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -37181,9 +40347,10 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -37213,6 +40380,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -37229,11 +40397,13 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -37244,11 +40414,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -37259,9 +40431,10 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -37269,7 +40442,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -37279,11 +40452,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -37292,11 +40467,13 @@ Reducing stack by rule 5 (line 101): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) -Entering state 10 +Enterininput: +g state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -37326,16 +40503,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next 
token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -37346,9 +40526,10 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -37358,18 +40539,20 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -37380,9 +40563,10 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -37393,6 +40577,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -37403,6 +40588,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -37428,6 +40614,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -37444,21 +40631,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -37469,23 +40660,24 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -37495,11 +40687,13 @@ Shifting token '=' (7.6: ) 
Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -37510,9 +40704,10 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -37542,6 +40737,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -37558,6 +40754,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -37568,11 +40765,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -37583,19 +40782,22 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +-> $$ = ./calc.at:1355: $PREPARSER ./calc input +nterm exp (9.1-5: -1) Entering state 8 Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -37606,9 +40808,10 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -37619,11 +40822,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -37634,9 +40839,10 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 136): +Reducing stack by rule 11 (line 124): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -37666,6 +40872,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -37676,16 +40883,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for 
a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -37696,11 +40906,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -37711,9 +40923,10 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -37724,16 +40937,17 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 119): +Reducing stack by rule 8 (line 113): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -37744,6 +40958,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -37754,6 +40969,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -37779,6 +40995,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -37795,6 +41012,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -37805,11 +41023,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -37820,11 +41040,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -37835,9 +41057,10 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -37845,7 +41068,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -37856,6 +41079,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token 
is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -37866,6 +41090,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -37891,11 +41116,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -37906,11 +41133,13 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -37921,9 +41150,10 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -37934,18 +41164,20 @@ Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): +Reducing stack by rule 13 (line 126): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -37956,9 +41188,10 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 137): +Reducing stack by rule 12 (line 125): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -37969,6 +41202,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -37979,6 +41213,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -38004,6 +41239,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -38012,52 +41248,35 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: | 1 2 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -stderr: -./calc.at:1353: cat stderr -./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1355: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ ./calc.at:1353: $PREPARSER ./calc input +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -38073,73 +41292,93 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.1: 2) +Stack now 0 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.1: 2) +Stack now 0 + | 1 2 +./calc.at:1358: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -38153,6 +41392,7 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token number (1.3: 2) 1.3: syntax error, unexpected number @@ -38160,9 +41400,27 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -stderr: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -38177,6 +41435,7 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token number (1.3: 2) 1.3: syntax error, unexpected number @@ -38184,6 +41443,71 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1360: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1353: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +input: +stderr: + | error +./calc.at:1360: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: + | 1//2 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1353: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -38198,84 +41522,21 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 
(line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1363: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -./calc.at:1354: "$PERL" -pi -e 'use strict; +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.1: ) +Stack now 0 +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -38285,91 +41546,88 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1353: $PREPARSER ./calc input -stderr: -./calc.at:1354: cat stderr -stderr: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1355: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.1: ) +Stack now 0 +input: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -38383,155 +41641,992 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +stderr: +stdout: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering 
state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (2.1: 1) + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1360: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1354: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + + | 1 = 2 = 3 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: cat stderr +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1355: cat stderr +./calc.at:1353: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token 
+Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 136): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) -> $$ = nterm exp (2.1-10: -5) @@ -39011,8 +43106,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducin./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -g stack by rule 11 (line 136): +Reducing stack by rule 11 (line 136): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -39388,13 +43482,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1//2 -./calc.at:1354: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -40412,221 +44501,28 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1360: cat stderr +input: input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) - | 1 2 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: stdout: -./types.at:139: ./check -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1353: cat stderr +./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: - | 1 + 2 * 3 + !* ++ -./calc.at:1354: cat stderr + | error ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40636,209 +44532,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' 
(1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1355: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) - | error -./calc.at:1354: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + input: + | (!!) 
+ (1 2) = 1 +./calc.at:1355: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 - | 1//2 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -40853,83 +44570,24 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1354: cat stderr -stderr: -stdout: -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1353: $PREPARSER ./calc input -input: -./types.at:139: $PREPARSER ./test -stderr: - | 1 = 2 = 3 -./calc.at:1354: $PREPARSER ./calc input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -40939,119 +44597,164 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.1: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 
(line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | + | +1 +./calc.at:1358: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: + | 1 = 2 = 3 +./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -41066,11 +44769,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.3: ) Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -41081,6 +44786,7 @@ -> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '=' (1.7: ) 1.7: syntax error, unexpected '=' @@ -41092,132 +44798,18 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token 
'+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -41231,11 +44823,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.3: ) Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -41246,6 +44840,7 @@ -> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '=' (1.7: ) 1.7: syntax error, unexpected '=' @@ -41257,20 +44852,8 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 - | error -======== Testing with C++ standard flags: '' -./calc.at:1355: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1354: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41280,9 +44863,127 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1358: cat stderr + | 1//2 +./calc.at:1354: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number 
(1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41292,16 +44993,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1354: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41311,17 +45003,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1355: cat stderr -stderr: -stdout: -input: - | - | +1 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: - | (1 + #) = 1111 +./calc.at:1360: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -41348,22 +45030,22 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1357: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - - | 1 = 2 = 3 -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1355: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -41390,7 +45072,7 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -stderr: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -41401,302 +45083,138 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" 
(1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) 
+Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -41706,7 +45224,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: $PREPARSER ./calc /dev/null stderr: Starting parse Entering state 0 @@ -42027,8 +45544,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by stderr: -rule 4 (line 97): +Reducing stack by rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -42727,17 +46243,7 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1355: cat stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -42747,15 +46253,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 +./calc.at:1360: cat stderr stderr: Starting parse Entering state 0 @@ -43774,71 +47272,31 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1360: $PREPARSER ./calc /dev/null ./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: cat stderr +./calc.at:1354: cat stderr input: - | - | +1 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1353: cat stderr -stderr: input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1358: cat stderr | 1 2 + | (- *) + (1 2) = 1 ./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input + | error +./calc.at:1354: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1358: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -stderr: -input: - | (# + 1) = 1111 Starting parse Entering state 0 Stack now 0 @@ -43859,9 +47317,19 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -43883,16 +47351,6 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -43902,94 +47360,146 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack 
now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -43999,10 +47509,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: cat stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -44012,94 +47558,146 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack 
now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./types.at:139: ./check -./calc.at:1357: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -44109,15 +47707,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: cat stderr -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: $PREPARSER ./calc /dev/null +./calc.at:1353: cat stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr ./calc.at:1357: cat stderr -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -44127,6 +47730,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -44135,13 +47739,114 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +input: + | 1//2 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1353: $PREPARSER ./calc input input: stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Stack now 0 8 19 +Error: popping token '=' (1.1: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.1: ) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1358: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -44156,7 +47861,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -44200,7 +47905,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -44223,7 +47928,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -44248,7 +47953,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -44257,7 +47962,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -44307,7 +48012,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -44316,7 +48021,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -44359,7 +48064,7 @@ Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -44392,7 +48097,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -44401,7 +48106,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -44455,18 +48160,14 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1354: $PREPARSER ./calc input +input: stderr: - | 1//2 -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -44482,22 +48183,31 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -44516,7 +48226,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -44560,7 +48270,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -44583,7 +48293,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -44608,7 +48318,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -44617,7 +48327,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -44667,7 +48377,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -44676,7 +48386,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -44719,7 +48429,7 @@ Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -44752,7 +48462,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 
(line 127): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -44761,7 +48471,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -44808,334 +48518,24 @@ Entering state 6 Stack now 0 6 Reading a token -Now at en./calc.at:1353: cat stderr -d of input. +Now at end of input. Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: - | (1 + # + 1) = 1111 -./calc.at:1353: $PREPARSER ./calc input -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -./calc.at:1357: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1355: $PREPARSER ./calc input -input: - | error -./calc.at:1357: $PREPARSER ./calc input -input: - | (!!) 
+ (1 2) = 1 -stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -45146,23 +48546,26 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) -> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.7: 1) Shifting token number (1.7: 1) @@ -45173,11 +48576,13 @@ -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token number (1.11: 1) Shifting token number (1.11: 1) @@ -45188,9 +48593,10 @@ -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 +Return for a new token: Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -45201,6 +48607,7 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token number (1.15: 1) Shifting token number (1.15: 1) @@ -45211,9 +48618,10 @@ -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 +Return for a new token: Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -45224,6 +48632,7 @@ Shifting token '+' (1.17: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
@@ -45238,16 +48647,17 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) -> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -45258,11 +48668,13 @@ Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -45276,6 +48688,7 @@ Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) @@ -45284,6 +48697,7 @@ Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) @@ -45292,21 +48706,23 @@ Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) -> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -45317,11 +48733,13 @@ Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.33: 1) Shifting token number (1.33: 1) @@ -45332,11 +48750,13 @@ -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) Entering state 22 Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token Next token is token number (1.37: 2) Shifting token number (1.37: 2) @@ -45347,9 +48767,10 @@ -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 +Return for a new token: Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -45360,6 +48781,7 @@ Shifting token '*' (1.39: ) Entering state 22 Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -45377,21 +48799,23 @@ Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) -> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -45402,6 +48826,7 @@ Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.46: 1) Shifting token number (1.46: 1) @@ -45412,6 +48837,7 @@ -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): @@ -45437,6 +48863,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -45445,152 +48872,58 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: $PREPARSER ./calc input -stderr: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -45599,6 +48932,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -45609,23 +48943,26 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) -> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.7: 1) Shifting token number (1.7: 1) @@ -45636,11 +48973,13 @@ -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token number (1.11: 1) Shifting token number (1.11: 1) @@ -45651,9 +48990,10 @@ -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 +Return for a new token: Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -45664,6 +49004,7 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token number (1.15: 1) Shifting token number (1.15: 1) @@ -45674,9 +49015,10 @@ -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 +Return for a new token: Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -45687,6 +49029,7 @@ Shifting token '+' (1.17: ) Entering state 21 Stack now 0 8 21 4 12 21 +Return for a new token: Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
@@ -45701,16 +49044,17 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) -> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -45721,11 +49065,13 @@ Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -45739,6 +49085,7 @@ Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) @@ -45747,6 +49094,7 @@ Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) @@ -45755,21 +49103,23 @@ Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) -> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -45780,11 +49130,13 @@ Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.33: 1) Shifting token number (1.33: 1) @@ -45795,11 +49147,13 @@ -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) Entering state 22 Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token Next token is token number (1.37: 2) Shifting token number (1.37: 2) @@ -45810,9 +49164,10 @@ -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 +Return for a new token: Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -45823,6 +49178,7 @@ Shifting token '*' (1.39: ) Entering state 22 Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -45840,21 +49196,23 @@ Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) -> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -45865,6 +49223,7 @@ Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.46: 1) Shifting token number (1.46: 1) @@ -45875,6 +49234,7 @@ -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): @@ -45900,6 +49260,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -45913,12 +49274,42 @@ Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1360: cat stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -45928,6 +49319,71 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: +./calc.at:1354: cat stderr +./calc.at:1358: cat stderr +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1358: $PREPARSER ./calc input + | + | +1 +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -45936,45 +49392,51 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 +Return for a new token: Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): +Reducing stack by rule 16 (line 129): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Stack now 0 4 Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) -> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.9: 1) Shifting token number (1.9: 1) @@ -45985,6 +49447,7 @@ -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token number (1.11: 2) 1.11: syntax error, unexpected number @@ -46000,21 +49463,23 @@ Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) -> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -46025,6 +49490,7 @@ Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -46035,6 +49501,7 @@ -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: 
Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): @@ -46060,6 +49527,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -46068,18 +49536,188 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 
1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46089,9 +49727,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1355: cat stderr -./calc.at:1357: cat stderr ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -46103,60 +49738,156 @@ }eg ' expout || exit 77 ./calc.at:1354: cat stderr -input: -input: +./calc.at:1354: $PREPARSER ./calc /dev/null | (!!) + (1 2) = 1 -./calc.at:1355: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1357: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -input: +./calc.at:1360: $PREPARSER ./calc input stderr: - | (1 + 1) / (1 - 1) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: $PREPARSER ./calc input +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) 
+Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1353: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -46175,7 +49906,7 @@ Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): +Reducing stack by rule 16 (line 129): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Stack now 0 4 @@ -46187,7 +49918,7 @@ Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -46234,7 +49965,7 @@ Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -46243,7 +49974,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -46298,6 +50029,14 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -46307,18 +50046,20 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 Stack now 0 4 2 +Return for a new token: Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): +Reducing stack by rule 15 (line 128): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -46332,28 +50073,32 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.10: 1) Shifting token number (1.10: 1) @@ -46364,6 +50109,7 @@ -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token number (1.12: 2) 1.12: syntax error, unexpected number @@ -46379,21 +50125,23 @@ Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) -> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -46404,6 +50152,7 @@ Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.17: 1) Shifting token number (1.17: 1) @@ -46414,6 +50163,7 @@ -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): @@ -46439,6 +50189,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -46447,45 +50198,26 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 stderr: Starting parse @@ -46496,176 +50228,20 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - 
$3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Return for a new token: Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): +Reducing stack by rule 15 (line 128): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -46679,28 +50255,32 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.10: 1) Shifting token number (1.10: 1) @@ -46711,6 +50291,7 @@ -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token number (1.12: 2) 1.12: syntax error, unexpected number @@ -46726,21 +50307,23 @@ Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) -> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -46751,6 +50334,7 @@ Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.17: 1) Shifting token number (1.17: 1) @@ -46761,6 +50345,7 @@ -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): @@ -46786,6 +50371,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -46794,293 +50380,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: cat stderr stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 
-Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm 
line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stdout: ./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1357: cat stderr ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -47091,17 +50394,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1354: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | - | +1 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -47111,9 +50404,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; +input: +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -47123,373 +50415,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: cat stderr -stderr: -./calc.at:1353: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) | (- *) + (1 2) = 1 -./calc.at:1355: $PREPARSER ./calc input +./calc.at:1360: $PREPARSER ./calc input +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +./calc.at:1354: cat stderr stderr: +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 @@ -47509,7 +50442,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): +Reducing stack by rule 15 (line 128): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -47528,7 +50461,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -47575,7 +50508,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -47584,7 +50517,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -47638,18 +50571,14 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -484. 
calc.at:1353: ok +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1353: cat stderr + | (* *) + (*) + (*) +./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -47670,7 +50599,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): +Reducing stack by rule 15 (line 128): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -47689,7 +50618,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -47736,7 +50665,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -47745,7 +50674,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -47799,388 +50728,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: cat stderr -./calc.at:1357: cat stderr - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1357: $PREPARSER ./calc /dev/null -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1355: cat stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (* *) + (*) + (*) -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1357: cat stderr -./calc.at:1354: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -48189,6 +50737,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -48202,6 +50751,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) @@ -48210,28 +50760,32 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -48245,21 +50799,23 @@ Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -48270,11 +50826,13 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.15: ) Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -48288,21 +50846,23 @@ Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -48324,6 +50884,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -48332,201 +50893,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -======== Testing with C++ standard flags: '' -492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1354: cat stderr -./calc.at:1355: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: Starting parse Entering state 0 @@ -48546,7 +50913,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -48590,7 +50957,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -48613,7 +50980,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -48638,7 +51005,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -48647,7 +51014,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -48697,7 +51064,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -48706,7 +51073,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -48749,7 +51116,7 @@ Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -48782,7 +51149,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -48791,7 +51158,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -48845,25 +51212,224 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1364: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -input: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | 1 + 2 * 3 + !+ ++ -./calc.at:1355: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1353: $PREPARSER ./calc input 
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -48883,7 +51449,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -48927,7 +51493,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -48950,7 +51516,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -48975,7 +51541,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -48984,7 +51550,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -49034,7 +51600,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -49043,7 +51609,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -49086,7 +51652,7 @@ Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -49119,7 +51685,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -49128,7 +51694,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -49182,178 +51748,224 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: $PREPARSER ./calc 
input -./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -stderr: -stdout: -./types.at:139: ./check +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1360: cat stderr +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (1.1: ) +Stack now 0 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1358: cat stderr + | (* *) + (*) + (*) +./calc.at:1360: $PREPARSER ./calc input +input: + | error +./calc.at:1357: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -49363,88 +51975,164 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1354: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -49459,11 +52147,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -49474,11 +52164,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -49489,9 +52181,10 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -49499,7 +52192,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -49510,124 +52203,184 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 input: - | 1 + 2 * 3 + !- ++ -./calc.at:1355: $PREPARSER ./calc input + | (!!) + (1 2) = 1 stderr: +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1353: cat stderr input: - | (!!) + (1 2) = 1 +stderr: + | (* *) + (*) + (*) +./calc.at:1353: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token 
'!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -49641,11 +52394,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -49656,11 +52411,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -49671,9 +52428,10 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -49681,7 +52439,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -49692,22 +52450,47 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -49717,121 +52500,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -49848,9 +52642,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -49869,7 +52660,7 @@ Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): +Reducing stack by rule 16 (line 141): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Stack now 0 4 @@ -49881,7 +52672,7 @@ Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -49928,7 +52719,7 @@ Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -49937,7 +52728,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -49991,31 +52782,118 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: cat stderr +./calc.at:1360: cat stderr + | 1 + 2 * 3 + !- ++ +stderr: +stderr: +./calc.at:1358: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1357: $PREPARSER ./calc input stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -50025,102 +52903,132 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token 
'=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -50137,134 +53045,160 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) 
+Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1357: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1355: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -50278,11 +53212,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -50293,11 +53229,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -50308,9 +53246,10 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -50318,7 +53257,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -50329,25 +53268,66 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1354: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1355: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -50357,10 +53337,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: +./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 @@ -50406,7 +53384,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -50414,7 +53392,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -50431,22 +53409,34 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: cat stderr +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1355: $PREPARSER ./calc input | (- *) + (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -50456,6 +53446,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -50475,7 +53467,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 140): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -50494,7 +53486,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -50541,7 +53533,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -50550,7 +53542,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -50604,11 +53596,96 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + #) = 1111 -./calc.at:1354: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -50628,7 +53705,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 140): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -50647,7 +53724,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -50694,7 +53771,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -50703,7 +53780,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -50758,220 +53835,275 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1355: cat stderr stderr: -stdout: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm 
exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 +Return for a new token: Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = 
token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +stdout: +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1353: cat stderr + | + | +1 +./calc.at:1357: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) 
-Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: cat stderr +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -50982,152 +54114,100 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | (#) + (#) = 2222 -stderr: -stdout: -./calc.at:1355: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1357: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1354: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +input: +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -51137,122 +54217,1098 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1353: $PREPARSER ./calc input ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack 
now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.8: ) +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' 
(1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 
+Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: 
) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -51269,9 +55325,24 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr stderr: +input: + | (#) + (#) = 2222 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -51306,7 +55377,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -51341,7 +55412,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -51350,7 +55421,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -51384,7 +55455,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -51393,7 +55464,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -51423,8 +55494,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -======== Testing with C++ standard flags: '' +input: Starting parse Entering state 0 Stack now 0 @@ -51433,8 +55515,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -51445,30 +55528,34 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) -> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.5: ) Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.7: ) Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 -Reading a token +Return for a new token: 1.8: syntax error: invalid character: '#' +Reading a token Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 @@ -51479,21 +55566,23 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) -> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -51504,6 +55593,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13-16: 2222) Shifting token number (1.13-16: 2222) @@ -51514,6 +55604,7 @@ -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): @@ -51538,6 +55629,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -51546,13 +55638,97 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !* ++ +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc /dev/null +./calc.at:1360: $PREPARSER ./calc input input: - | (# + 1) = 1111 -./calc.at:1354: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -51562,6 +55738,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -51570,8 +55747,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -51582,65 +55760,99 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' +Reading a token +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number 
(1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -51649,6 +55861,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -51657,9 +55870,104 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1353: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +stderr: +stderr: +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -51669,132 +55977,188 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: cat stderr -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stdout: +./types.at:139: ./check +input: + | (!!) + (1 2) = 1 +stderr: +stderr: +./calc.at:1353: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1354: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -./calc.at:1355: cat stderr -./calc.at:1358: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -input: - | (1 + #) = 1111 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1357: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: stderr: Starting parse Entering state 0 @@ -51841,7 +56205,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -51849,7 +56213,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -51870,15 +56234,13 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 142): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -51888,111 +56250,175 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' 
(1.1: ) + $2 = token '!' (1.1: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) 
Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: ./check +./calc.at:1358: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -52038,7 +56464,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -52046,7 +56472,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -52067,15 +56493,160 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 142): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: + | (1 + #) = 1111 ./calc.at:1358: $PREPARSER ./calc input +./calc.at:1360: cat stderr +./calc.at:1357: cat stderr +stderr: +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -52084,6 +56655,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -52094,13 +56666,15 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Reading a token +Return for a new token: 1.6: syntax error: invalid character: '#' +Reading a token Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -52115,23 +56689,26 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -52142,6 +56719,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -52166,6 +56744,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -52174,11 +56753,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (#) + (#) = 2222 +./calc.at:1360: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ input: - | (1 + # + 1) = 1111 ./calc.at:1354: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1357: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -52196,6 +56785,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -52206,13 +56796,15 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Reading a token +Return for a new token: 1.6: syntax error: invalid character: '#' +Reading a token Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -52227,65 +56819,176 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error (1.2-8: ) +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token 
'\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -52302,9 +57005,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -52318,13 +57019,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -52335,13 +57034,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -52352,56 +57049,152 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 
7 Stack now 0 7 @@ -52410,1008 +57203,342 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Return for a new token: -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Return for a new token: -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Return for a new token: -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Return for a new token: -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering 
state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Return for a new token: -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Return for a new token: -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Return for a new token: -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 -Return for a new token: -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Return for a new token: -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Return for a new token: -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) 
-Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Return for a new token: -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Return for a new token: -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Return for a new token: -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new 
token: -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Stack now 0 6 8 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 -Stack now 0 6 8 20 4 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Return for a new token: +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Return for a new token: +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Return for a new token: + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 19 1 
+Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Return for a new token: +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Return for a new token: +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 6 4 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) 
+Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 6 4 12 -Return for a new token: +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Return for a new token: +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Return for a new token: + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: cat stderr +stderr: +stderr: +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -53421,95 +57548,102 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is 
token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -53526,9 +57660,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: - | (# + 1) = 1111 Starting parse Entering state 0 Stack now 0 @@ -53542,13 +57673,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -53559,13 +57688,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -53576,56 +57703,158 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a 
token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 
2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -53634,1005 +57863,794 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 6 8 21 -Return for a new token: +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 6 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Return for a new token: +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Return for a new token: +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.15: 1) 
+Shifting token number (1.15: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Return for a new token: + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = 
nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Return for a new token: +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Return for a new token: +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Return for a new token: +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering 
state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Return for a new token: + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Return for a new token: +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Return for a new token: + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Next token is token '*' (1.41: ) 
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input 
(1.1-7.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Return for a new token: -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Return for a new token: -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Return for a new token: Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Return for a new token: +Now at end of inp./calc.at:1353: cat stderr +ut. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: cat stderr +./calc.at:1358: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Stack now 0 4 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line 
(8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = ./calc.at:1355: $PREPARSER ./calc input -nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 
1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Return for a new token: +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input +input: +stderr: +stderr: + | 1 + 2 * 3 + !* ++ +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 -Return for a new token: -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Return for a new token: -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Return for a new token: -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Return for a new token: -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) 
+-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Stack now 0 8 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Return for a new token: +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Return for a new token: +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Return for a new token: +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is 
token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Return for a new token: +Stack now 0 8 19 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Return for a new token: +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: +./calc.at:1354: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Return for a new token: Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Return for a new token: Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Return for a new token: Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Return for a new token: -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 Return for a new token: Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Return for a new 
token: Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Return for a new token: Reading a token Now at end of input. -Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +450. types.at:139: | (1 + #) = 1111 + ok +./calc.at:1360: $PREPARSER ./calc input stderr: -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -54678,7 +58696,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -54686,7 +58704,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -54703,27 +58721,22 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: cat stderr stderr: stderr: Starting parse @@ -54734,8 +58747,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -54746,6 +58760,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -54754,6 +58769,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -54762,23 +58778,26 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -54789,6 +58808,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -54813,6 +58833,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -54821,10 +58842,105 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -input: - | 1 2 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: Starting parse Entering state 0 Stack now 0 @@ -54870,7 +58986,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -54878,7 +58994,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -54895,19 +59011,21 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: $PREPARSER ./calc input -stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | (!!) + (1 2) = 1 Starting parse Entering state 0 Stack now 0 @@ -54917,29 +59035,32 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -55003,30 +59124,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -55036,41 +59136,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + +stderr: stderr: - | (1 + 1) / (1 - 1) -./calc.at:1354: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr stderr: Starting parse Entering state 0 @@ -55096,107 +59164,64 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering 
state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55213,12 +59238,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -55228,7 +59248,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -55238,122 +59257,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering 
state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55370,178 +59388,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -55551,7 +59399,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: Starting parse Entering state 0 Stack now 0 @@ -55592,64 +59439,48 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55666,12 +59497,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | 1//2 -./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -55682,95 +59507,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = 
nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 
1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55787,7 +59638,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1354: cat stderr +./calc.at:1358: cat stderr +./calc.at:1353: cat stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -55797,39 +59651,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -55840,51 +59661,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -485. calc.at:1354: Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 - ok input: -./calc.at:1355: cat stderr | (#) + (#) = 2222 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +./calc.at:1354: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) +./calc.at:1353: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -55911,7 +59696,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -55945,7 +59730,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -55954,7 +59739,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -56007,12 +59792,305 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing 
stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) +./calc.at:1357: cat stderr + | (# + 1) = 1111 +stderr: ./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 @@ -56038,7 +60116,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -56072,7 +60150,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -56081,7 +60159,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -56134,7 +60212,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - stderr: Starting parse Entering state 0 @@ -56145,151 +60222,148 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token 
'(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 
14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -56298,6 +60372,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -56308,113 +60383,94 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is 
token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56423,6 +60479,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of 
input. Shifting token end of file (2.1: ) @@ -56431,84 +60488,41 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1358: $PREPARSER ./calc input -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -./types.at:139: $PREPARSER ./test -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: stderr: +./calc.at:1360: cat stderr input: -./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1357: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -486. calc.at:1355: Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -56518,7 +60532,7 @@ Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -56572,9 +60586,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -./calc.at:1358: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -56585,7 +60600,18 @@ }eg ' expout || exit 77 stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +input: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -56595,32 +60621,29 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -56630,7 +60653,7 @@ Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -56684,127 +60707,159 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: cat stderr - -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1358: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -493. calc.at:1367: testing Calculator parse.error=custom ... 
-input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1367: mv calc.y.tmp calc.y - -./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | (# + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +stderr: stderr: Starting parse Entering state 0 @@ -56901,9 +60956,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1354: $PREPARSER ./calc input ./calc.at:1358: cat stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -56913,76 +60970,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting 
token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56999,12 +61109,17 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | - | +1 -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57014,69 +61129,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... 
-./calc.at:1368: mv calc.y.tmp calc.y - -./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (1 + # + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -57086,95 +61141,76 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 
1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57191,18 +61227,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Stack now 0 @@ -57243,64 +61267,48 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm 
exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57317,46 +61325,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: cat stderr -./calc.at:1358: $PREPARSER ./calc /dev/null -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: ./check -./calc.at:1357: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + 1) / (1 - 1) -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57366,6 +61338,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1355: cat stderr stderr: Starting parse Entering state 0 @@ -57375,6 +61348,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -57385,11 +61359,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -57400,6 +61376,7 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -57420,16 +61397,19 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 +Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 +Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -57440,11 +61420,13 @@ -> $$ = nterm exp (1.12: 1) Entering state 
12 Stack now 0 8 23 4 12 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -57455,6 +61437,7 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): @@ -57475,6 +61458,7 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -57500,6 +61484,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -57508,10 +61493,123 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: + | (1 + # + 1) = 1111 +./calc.at:1355: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 
+Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1353: cat stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +stderr: +./calc.at:1360: cat stderr Starting parse Entering state 0 Stack now 0 @@ -57520,6 +61618,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -57530,11 +61629,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -57545,6 +61646,7 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -57565,16 +61667,19 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 +Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 +Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -57585,11 +61690,13 @@ -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -57600,6 +61707,7 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): @@ -57620,6 +61728,7 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -57645,6 +61754,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -57654,19 +61764,6 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr stderr: Starting parse Entering state 0 @@ -57676,326 +61773,357 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 
25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1353: $PREPARSER ./calc input +input: +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1354: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Stack now 0 8 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return 
for a new token: -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Return for a new token: -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' 
(1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58004,7 +62132,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58013,8 +62140,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -487. calc.at:1357: ok +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -58024,326 +62150,335 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 
Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input: + | (# + 1) = 1111 +input: +./calc.at:1354: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1360: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Stack now 0 8 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 -Return for a new token: +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Return for a new token: +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return 
for a new token: -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Return for a new token: Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Return for a new token: -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' 
(1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58352,7 +62487,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58361,23 +62495,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1358: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1358: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: Starting parse Entering state 0 @@ -58387,133 +62506,96 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Return for a new token: +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 
(line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58522,7 +62604,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58531,7 +62612,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -58541,133 +62621,77 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = 
nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58676,7 +62700,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58685,7 +62708,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -58695,169 +62719,194 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: cat stderr -stderr: -stdout: -./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 -./calc.at:1358: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1353: $PREPARSER ./calc input stderr: -./calc.at:1360: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h +stderr: +488. calc.at:1358: ok -495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' 
(1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: +Stack now 0 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = 
token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58866,7 +62915,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58875,11 +62923,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: mv calc.y.tmp calc.y - -stderr: -./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y Starting parse Entering state 0 Stack now 0 @@ -58888,132 +62931,327 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +input: + | (1 + 1) / (1 - 1) +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: cat stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: +Stack now 0 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Stack now 0 4 12 21 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token 
'-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -59021,7 +63259,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -59031,7 +63269,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -59040,22 +63277,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -59065,11 +63290,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1358: $PREPARSER ./calc input -stderr: +./calc.at:1353: cat stderr +./calc.at:1360: cat stderr +./calc.at:1354: cat stderr stderr: Starting parse Entering state 0 @@ -59115,7 +63338,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -59123,7 +63346,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -59131,963 +63354,615 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 
-Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 21 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm 
exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 2 10 24 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = 
token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 8 23 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 23 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: 
-1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1360: $PREPARSER ./calc input + | (1 + # + 1) = 1111 + | 1 + 2 * 3 + !* ++ +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = 
nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '=' (9.11: ) +Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) 
+Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) 
+Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) 
+Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 
125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -60096,145 +63971,96 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -60243,7 +64069,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 
0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -60252,12 +64077,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' Starting parse Entering state 0 Stack now 0 @@ -60266,136 +64085,114 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Stack now 0 4 12 21 30 Reading a token -Next token is token '+' (1.13: ) +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: +Stack now 0 8 23 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -60403,7 +64200,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -60413,7 +64210,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -60423,6 +64219,90 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1357: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: Starting parse Entering state 0 Stack now 0 @@ -60467,7 +64347,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -60475,7 +64355,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -60483,976 +64363,2039 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is 
token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +486. calc.at:1355: ok +490. 
calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1363: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1362: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1354: cat stderr + +./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: + | (1 + 1) / (1 - 1) +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1360: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 2 10 24 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = 
nterm exp (4.1-4: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + 
$1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: cat stderr +489. calc.at:1360: ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok + +./calc.at:1353: cat stderr +492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1364: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +stdout: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) 
+Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 +./calc.at:1353: $PREPARSER ./calc input +input: +./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 + 2 * 3 + !* ++ +./calc.at:1357: $PREPARSER ./calc input +493. calc.at:1367: testing Calculator parse.error=custom ... +./calc.at:1367: mv calc.y.tmp calc.y + +./types.at:139: $PREPARSER ./test +./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by 
rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: 
-1) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 -Stack now 0 6 8 19 28 
-Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) 
Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1354: cat stderr +./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +485. calc.at:1354: ok + +449. types.at:139: ok +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr + +494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... 
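The traces above for the input "(1 + 1) / (1 - 1)" show rule 10 (exp '/' exp) printing "1.11-17: error: null divisor" and still reducing to the left operand's value, 2. The following is only a minimal C sketch of that observed behaviour, assuming an int semantic value and a simplified location record; divide, loc_t and the printing format are illustrative and are not the test's actual grammar action.

    #include <stdio.h>

    /* Illustrative location record; the generated parsers use Bison's YYLTYPE. */
    typedef struct { int line, first_col, last_col; } loc_t;

    /* Sketch of the behaviour seen in the trace: on a zero divisor, report the
       error at the divisor's location and fall back to the left operand, which
       is why the trace ends with "-> $$ = nterm exp (1.1-17: 2)" for 2 / 0.  */
    static int
    divide (int lhs, int rhs, loc_t rhs_loc)
    {
      if (rhs == 0)
        {
          fprintf (stderr, "%d.%d-%d: error: null divisor\n",
                   rhs_loc.line, rhs_loc.first_col, rhs_loc.last_col);
          return lhs;
        }
      return lhs / rhs;
    }

    int
    main (void)
    {
      loc_t divisor = { 1, 11, 17 };           /* "(1 - 1)" in the trace above */
      printf ("%d\n", divide (2, 0, divisor)); /* prints 2, as in the trace */
      return 0;
    }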
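Throughout these traces a reduced nonterminal's location spans from its first component's start to its last component's end, e.g. '(' at 1.9 and ')' at 1.11 combine into exp (1.9-11). That is Bison's default location computation (YYLLOC_DEFAULT); the sketch below uses an illustrative span type rather than the generated parser's YYLTYPE, and the compact printing assumes a single-line range as in these traces.

    #include <stdio.h>

    /* Illustrative stand-in for the generated parser's YYLTYPE. */
    typedef struct { int first_line, first_column, last_line, last_column; } span;

    /* Default-style location merge: the result starts where the first
       right-hand-side symbol starts and ends where the last one ends. */
    static span
    merge (span first, span last)
    {
      span s;
      s.first_line   = first.first_line;
      s.first_column = first.first_column;
      s.last_line    = last.last_line;
      s.last_column  = last.last_column;
      return s;
    }

    int
    main (void)
    {
      span lparen = { 1, 9, 1, 9 };    /* '(' (1.9: )  */
      span rparen = { 1, 11, 1, 11 };  /* ')' (1.11: ) */
      span exp = merge (lparen, rparen);
      /* Prints "1.9-11", matching "nterm exp (1.9-11: ...)" in the traces. */
      printf ("%d.%d-%d\n", exp.first_line, exp.first_column, exp.last_column);
      return 0;
    }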
+./calc.at:1368: mv calc.y.tmp calc.y + +./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1353: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1353: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +stderr: +./calc.at:1357: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm 
input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1369: mv calc.y.tmp calc.y + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 20 4 12 
20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) 
+Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + #) = 1111 +./calc.at:1357: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1353: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 24 33 24 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 
6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (# + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number 
(13.4: 2) -Shifting token number (13.4: 2) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token 
-Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1357: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1357: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: 
) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1362: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1358: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -61462,264 +66405,1075 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: cat stderr +./calc.at:1357: cat stderr input: -./calc.at:1358: cat stderr - | 1 2 -input: -./calc.at:1360: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1362: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input stderr: input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -stdout: -./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | 1 + 2 * 3 + !+ ++ 
-./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1357: $PREPARSER ./calc input stderr: stderr: -./calc.at:1363: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is 
token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing 
stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Return for a new token: +Stack now 0 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' 
(1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1353: cat stderr +input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1353: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' 
(1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Return for a new token: +Stack now 0 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Stack now 0 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp 
(1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is 
token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -61729,6 +67483,222 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +487. calc.at:1357: ok + +./calc.at:1353: cat stderr +484. calc.at:1353: ok + +496. 
calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... +./calc.at:1370: mv calc.y.tmp calc.y + +./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... +./calc.at:1371: mv calc.y.tmp calc.y + +./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: +./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1367: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1368: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1369: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +stderr: +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1369: $PREPARSER ./calc input +stderr: +input: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 + | 1 2 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: + | 1 2 +./calc.at:1369: $PREPARSER ./calc input +stderr: +./calc.at:1368: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +input: + | 1//2 +./calc.at:1368: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +stderr: +stderr: +stdout: +./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: +./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1362: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1363: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1369: cat stderr +./calc.at:1368: cat stderr +input: + | 1//2 +input: +./calc.at:1369: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1362: $PREPARSER ./calc input input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -61744,6 +67714,48 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1363: $PREPARSER ./calc input + | error +./calc.at:1368: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1364: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +stderr: +./calc.at:1367: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | error +./calc.at:1367: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1364: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -62762,113 +68774,18 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1360: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 
112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -62878,12 +68795,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -62893,18 +68810,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -62912,7 +68829,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -62924,18 +68841,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -62946,24 +68863,24 @@ Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-13: 7) $2 
= token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 @@ -62973,12 +68890,12 @@ Entering state 21 Stack now 0 6 8 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 @@ -62993,25 +68910,25 @@ Entering state 2 Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -63019,7 +68936,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -63036,25 +68953,25 @@ Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 28 Stack now 0 6 8 19 28 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -63065,13 +68982,13 @@ Shifting token '\n' (2.16-3.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 
(line 79): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -63082,12 +68999,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -63099,12 +69016,12 @@ Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 @@ -63114,18 +69031,18 @@ Entering state 24 Stack now 0 6 2 10 24 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 112): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -63133,7 +69050,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -63149,25 +69066,25 @@ Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 28 Stack now 0 6 8 19 28 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -63178,13 +69095,13 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -63201,18 +69118,18 @@ Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = 
token number (5.3: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -63222,7 +69139,7 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 113): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -63235,18 +69152,18 @@ Entering state 24 Stack now 0 6 8 24 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 112): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -63258,18 +69175,18 @@ Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -63280,13 +69197,13 @@ Shifting token '\n' (5.11-6.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -63297,12 +69214,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -63324,32 +69241,32 @@ Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) 
-Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -63365,25 +69282,25 @@ Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 28 Stack now 0 6 8 19 28 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -63394,13 +69311,13 @@ Shifting token '\n' (7.10-8.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -63411,24 +69328,24 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 @@ -63438,18 +69355,18 @@ Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 100): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -63461,18 +69378,18 @@ Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number 
(9.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 100): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -63489,26 +69406,25 @@ Entering state 2 Stack now 0 6 8 19 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducin./calc.at:1360: $PREPARSER ./calc input -g stack by rule 11 (line 124): +Reducing stack by rule 11 (line 111): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 28 Stack now 0 6 8 19 28 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -63519,25 +69435,25 @@ Shifting token '\n' (9.15-10.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 18 Stack now 0 6 18 -Reducing stack by rule 2 (line 92): +Reducing stack by rule 2 (line 79): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 @@ -63552,12 +69468,12 @@ Entering state 4 Stack now 0 6 8 20 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 @@ -63567,12 +69483,1038 @@ Entering state 20 Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 
(line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 
19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 
64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 
+Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = 
nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering 
state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting 
token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 
8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 @@ -63877,130 +70819,1027 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = 
token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 -Return for a new token: +Stack now 0 6 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 
(line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 
1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 
(line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp 
(9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading 
a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering 
state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end 
of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stderr: -./calc.at:1362: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -65018,68 +72857,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +input: + | error +./calc.at:1369: $PREPARSER ./calc input +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: cat stderr stderr: Starting parse Entering state 0 @@ -66098,16 +73882,101 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +input: +stderr: + | 1 2 + | 1 2 +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1362: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1364: $PREPARSER ./calc input +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +stderr: +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: +stderr: +./calc.at:1369: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 + | 1 = 2 = 3 +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1368: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +stderr: Starting parse Entering state 0 Stack now 0 @@ -66128,12 +73997,12 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { 
my $unexp = $1; @@ -66143,9 +74012,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1369: $PREPARSER ./calc input +stderr: +stderr: stderr: input: - | 1 2 Starting parse Entering state 0 Stack now 0 @@ -66159,89 +74031,89 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 + | + | +1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: cat stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: $PREPARSER ./calc input +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +stderr: +./calc.at:1368: $PREPARSER ./calc input +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: cat stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: - | error + | 1//2 +./calc.at:1364: $PREPARSER ./calc input stderr: -./calc.at:1360: $PREPARSER ./calc input +stdout: +./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1370: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: cat stderr +./calc.at:1367: cat stderr stderr: +./calc.at:1369: cat stderr Starting parse Entering state 0 Stack now 0 @@ -66256,115 +74128,114 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +./calc.at:1362: cat stderr +input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1368: cat stderr + | + | +1 +./calc.at:1367: $PREPARSER ./calc input +input: +stderr: + | 1//2 +./calc.at:1363: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1362: $PREPARSER ./calc input +input: +./calc.at:1368: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1370: $PREPARSER ./calc input + | + | +1 +./calc.at:1369: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1//2 -stderr: -Starting parse -Entering state 0 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -66392,7 +74263,33 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1358: "$PERL" -pi -e 'use strict; +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +./calc.at:1371: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -66402,7 +74299,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -66418,23 +74316,21 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 stderr: -./calc.at:1358: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -66462,6 +74358,24 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: cat stderr +input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1371: $PREPARSER ./calc input ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -66473,18 +74387,16 @@ }eg ' expout || exit 77 input: - | (#) + (#) = 2222 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 2 +./calc.at:1370: $PREPARSER ./calc input + | error +stderr: +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -66495,364 +74407,85 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: cat stderr +./calc.at:1369: $PREPARSER ./calc /dev/null +./calc.at:1367: cat stderr +./calc.at:1368: cat stderr stderr: -./calc.at:1362: cat stderr -./calc.at:1360: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: 
invalid character: '#' -Reading a token -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1367: $PREPARSER ./calc /dev/null +./calc.at:1363: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: input: +./calc.at:1362: cat stderr stderr: +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1368: $PREPARSER ./calc input input: - | 1 = 2 = 3 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: 
-Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: $PREPARSER ./calc input - | 1//2 +stderr: +stderr: | error ./calc.at:1363: $PREPARSER ./calc input +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error ./calc.at:1362: $PREPARSER ./calc input stderr: -stderr: -stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) +Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1370: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -66862,8 +74495,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + | 1 2 +input: +./calc.at:1371: $PREPARSER ./calc input + | 1//2 +./calc.at:1370: $PREPARSER ./calc input stderr: -stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -66873,6 +74524,89 @@ Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 stderr: +./calc.at:1369: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1368: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1362: 
"$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1369: $PREPARSER ./calc input +stderr: +input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: + | (!!) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1364: cat stderr +./calc.at:1363: cat stderr +stderr: +./calc.at:1362: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1370: cat stderr +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +input: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: + | 1 = 2 = 3 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1363: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input +stderr: +stderr: +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -66926,184 +74660,101 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1358: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1362: cat stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -./calc.at:1363: cat stderr stderr: -input: -input: + | error +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.9: ) 
-Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 = 2 = 3 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: $PREPARSER ./calc input - | - | +1 -./calc.at:1360: $PREPARSER ./calc input +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: cat stderr +stderr: +./calc.at:1369: cat stderr +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1371: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -67113,12 +74764,12 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number 
(1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 @@ -67134,152 +74785,49 @@ Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 input: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token 
'\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1363: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 stderr: + | (- *) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -67319,33 +74867,23 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +input: + | (!!) 
+ (1 2) = 1 +input: +./calc.at:1369: $PREPARSER ./calc input + | 1//2 +./calc.at:1371: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67355,15 +74893,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -67374,8 +74904,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67385,246 +74914,123 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1370: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: + | 1 = 2 = 3 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1367: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +input: + | (!!) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1363: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1364: cat stderr ./calc.at:1362: cat stderr +stderr: input: -./calc.at:1360: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1358: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) input: -./calc.at:1360: $PREPARSER ./calc /dev/null +input: +stderr: + | + | +1 +./calc.at:1368: cat stderr +./calc.at:1363: $PREPARSER ./calc input + | + | +1 +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 + | + | +1 +./calc.at:1369: cat stderr +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1371: cat stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a 
token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1363: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -stderr: -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' -Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +input: +input: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input + | error +./calc.at:1371: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -67650,31 +75056,40 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +stderr: +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 - | 1 = 2 = 3 -./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) Starting parse Entering state 0 Stack now 0 @@ -67683,12 +75098,12 @@ Shifting token '\n' (1.1-2.0: ) Entering state 3 Stack now 0 3 -Reducing stack by rule 3 (line 96): +Reducing stack by rule 3 (line 83): $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -67700,47 +75115,46 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1360: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67750,10 +75164,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1370: cat stderr 
+./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67763,51 +75175,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 input: - | (1 + # + 1) = 1111 -./calc.at:1362: cat stderr -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1363: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1367: cat stderr +./calc.at:1364: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -67817,136 +75191,107 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1369: cat stderr +./calc.at:1368: cat stderr +./calc.at:1363: cat stderr +./calc.at:1371: cat stderr +stderr: +./calc.at:1364: $PREPARSER ./calc /dev/null +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +input: +./calc.at:1363: $PREPARSER ./calc /dev/null +input: + | (- *) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +input: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1369: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1368: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1362: cat stderr stderr: +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: 
) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +stderr: +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1370: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1360: $PREPARSER ./calc input +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -67956,7 +75301,91 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 +./calc.at:1370: $PREPARSER ./calc /dev/null +./calc.at:1371: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1369: cat stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: $PREPARSER ./calc input +stderr: +input: +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: cat stderr +stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +input: +input: +./calc.at:1363: cat stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (* *) + (*) + (*) +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1368: cat stderr +stderr: +./calc.at:1362: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: +input: +stderr: +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1370: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1362: $PREPARSER ./calc input +input: Starting parse Entering state 0 Stack now 0 @@ -67965,126 +75394,324 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' +Stack now 0 8 21 Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Return for a new token: +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 -Return for a new token: +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 -Return for a new token: +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Return for a new token: +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading 
a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token 
'\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1371: cat stderr stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -68103,7 +75730,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -68121,12 +75748,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -68136,18 +75763,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -68159,18 +75786,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -68195,7 +75822,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -68204,7 +75831,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 
112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -68254,7 +75881,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -68263,7 +75890,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -68280,12 +75907,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -68295,18 +75922,18 @@ Entering state 22 Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -68339,7 +75966,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -68348,7 +75975,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -68360,18 +75987,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -68383,59 +76010,33 @@ Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +stderr: +stderr: +1.14: memory exhausted +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 @@ -68746,254 +76347,19 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. +Now at end of inp | 1 + 2 * 3 + !- ++ +./calc.at:1369: $PREPARSER ./calc input +ut. Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1358: cat stderr -./calc.at:1362: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1362: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1360: cat stderr -stderr: -stdout: -./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Return for a new token: -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Return for a new token: -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Return for a new token: -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 
by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -69012,7 +76378,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -69030,12 +76396,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -69045,18 +76411,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -69068,18 +76434,18 @@ Entering state 21 Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is 
token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -69104,7 +76470,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -69113,7 +76479,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -69163,7 +76529,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -69172,7 +76538,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -69189,12 +76555,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 @@ -69204,18 +76570,18 @@ Entering state 22 Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -69248,7 +76614,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -69257,7 +76623,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -69269,18 +76635,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) +Reducing stack by rule 5 (line 88): + $1 = token 
"number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -69292,43 +76658,27 @@ Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1367: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - stderr: -./calc.at:1363: cat stderr Starting parse Entering state 0 Stack now 0 @@ -69337,152 +76687,322 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 4 12 21 -Return for a new token: +Stack now 0 8 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 4 12 21 30 -Return for a new token: +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Return for a new token: +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 8 23 4 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 8 23 4 12 -Return for a new token: +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Return for a new token: +Next token is token '*' (1.35: ) +Shifting token '*' 
(1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Return for a new token: +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Return for a new token: +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -69800,6 +77320,80 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.14: memory exhausted +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1367: cat stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +stderr: +./calc.at:1364: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1367: $PREPARSER ./calc input +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1364: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1369: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: cat stderr +./calc.at:1362: cat stderr +stderr: +./calc.at:1363: cat stderr +./calc.at:1371: cat stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -69818,7 +77412,7 @@ Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): +Reducing stack by rule 16 (line 116): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Stack now 0 4 @@ -69830,7 +77424,7 @@ Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -69848,25 +77442,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.11: 2) +Next token is token "number" (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Stack now 0 8 21 4 Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Error: popping token error (1.9-11: ) Stack now 0 8 21 4 Shifting token error (1.9-11: ) @@ -69877,7 +77471,7 @@ Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -69886,7 +77480,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -69898,18 +77492,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) @@ -69921,76 +77515,43 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: $PREPARSER ./calc /dev/null -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: stderr: input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1367: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1362: $PREPARSER ./calc input stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: cat stderr +1.14: memory exhausted +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) + (1 2) = 1 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -70131,37 +77692,307 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 stderr: -./calc.at:1362: cat stderr -488. calc.at:1358: ok -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 Stack now 0 -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input -input: +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is 
token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.14: memory exhausted +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1360: cat stderr - | 1 2 -./calc.at:1367: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -70302,22 +78133,14 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1367: $PREPARSER ./calc input stderr: -input: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -70336,7 +78159,7 @@ Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): +Reducing stack by rule 16 (line 116): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Stack now 0 4 @@ -70348,7 +78171,7 @@ Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -70366,25 +78189,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.11: 2) +Next token is token "number" (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Stack now 0 8 21 4 Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Error: popping token error (1.9-11: ) Stack now 0 8 21 4 Shifting token error (1.9-11: ) @@ -70395,7 +78218,7 @@ Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -70404,7 +78227,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -70416,18 +78239,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) @@ -70439,31 +78262,108 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1368: cat stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +./calc.at:1370: cat stderr stderr: +input: + | (1 + #) = 1111 +input: +input: + | (#) + (#) = 2222 +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1368: $PREPARSER ./calc input | (- *) + (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1367: cat stderr +./calc.at:1364: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1371: cat stderr +stderr: +stderr: +./calc.at:1362: cat stderr +1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: cat stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (- *) + (1 2) = 1 +./calc.at:1364: $PREPARSER ./calc input +stderr: +stderr: +input: +input: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 + | (!!) 
+ (1 2) = 1 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1363: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -70484,7 +78384,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -70503,7 +78403,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -70521,25 +78421,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) @@ -70550,7 +78450,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -70559,7 +78459,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -70571,18 +78471,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -70594,48 +78494,36 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -./calc.at:1362: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -stdout: -./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: stderr: stderr: -input: +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +memory exhausted Starting parse Entering state 0 Stack now 0 @@ -70784,7 +78672,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1//2 +stderr: Starting parse Entering state 0 Stack now 0 @@ -70794,298 +78682,129 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a 
token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -71102,24 
+78821,14 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (- *) + (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1368: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -71139,7 +78848,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -71158,7 +78867,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -71176,25 +78885,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) @@ -71205,7 +78914,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -71214,7 +78923,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -71226,18 +78935,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing 
stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -71249,38 +78958,32 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1368: cat stderr +./calc.at:1369: cat stderr +memory exhausted Starting parse Entering state 0 Stack now 0 @@ -71290,330 +78993,157 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token 
-Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) 
- $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1368: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -71634,7 +79164,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -71653,7 +79183,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -71671,25 +79201,25 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 21 4 Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 21 4 Shifting token error (1.10-12: ) @@ -71700,7 +79230,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -71709,7 +79239,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -71721,18 +79251,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 
Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -71744,30 +79274,39 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1370: cat stderr + | (# + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input +input: + | (1 + #) = 1111 +./calc.at:1369: $PREPARSER ./calc input stderr: -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1360: cat stderr +1.2: syntax error: invalid character: '#' ./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +input: + | (* *) + (*) + (*) +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -71778,11 +79317,11 @@ }eg ' expout || exit 77 stderr: -input: -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (* *) + (*) + (*) -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr +./calc.at:1371: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -71792,14 +79331,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: cat stderr -496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... 
+./calc.at:1367: cat stderr stderr: +input: +1.2: syntax error: invalid character: '#' ./calc.at:1362: cat stderr -./calc.at:1370: mv calc.y.tmp calc.y - -./calc.at:1367: cat stderr input: + | (* *) + (*) + (*) +stderr: +./calc.at:1364: $PREPARSER ./calc input +stderr: + | (- *) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: +input: + | (#) + (#) = 2222 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1362: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -71834,7 +79389,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -71869,7 +79424,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -71878,7 +79433,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -71912,7 +79467,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -71921,7 +79476,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -71932,43 +79487,42 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 2 -./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -./calc.at:1368: $PREPARSER ./calc input - | (!!) 
+ (1 2) = 1 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: cat stderr stderr: -input: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1362: $PREPARSER ./calc input - | error +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: -./calc.at:1367: $PREPARSER ./calc input stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1368: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: Starting parse Entering state 0 @@ -72121,7 +79675,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (* *) + (*) + (*) +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -72131,121 +79688,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = 
nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -72262,9 +79830,19 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1369: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1369: $PREPARSER ./calc input stderr: stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: Starting parse Entering state 0 Stack now 0 @@ -72299,7 +79877,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -72334,7 +79912,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -72343,7 +79921,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -72377,7 +79955,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -72386,7 +79964,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -72397,40 +79975,25 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 
0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -72440,143 +80003,173 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -./calc.at:1368: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1370: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -72611,7 +80204,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -72646,7 +80239,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -72655,7 +80248,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -72689,7 +80282,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -72698,7 +80291,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -72709,30 +80302,30 @@ Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1360: cat stderr -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1364: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72742,11 +80335,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1371: cat stderr +1.2: syntax error: invalid character: '#' stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: cat stderr input: -./calc.at:1362: "$PERL" -pi -e 'use strict; +input: +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72756,31 +80350,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: cat stderr -input: -./calc.at:1368: cat stderr -input: -./calc.at:1362: cat stderr - | 1 = 2 = 3 -./calc.at:1367: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1371: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ -./calc.at:1360: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1363: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1367: cat stderr +./calc.at:1362: cat stderr stderr: +./calc.at:1368: cat stderr stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -72790,12 +80379,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -72805,18 +80394,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -72824,7 +80413,7 @@ Entering state 30 Stack now 0 8 21 
30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -72845,184 +80434,142 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 117): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) stderr: +./calc.at:1369: cat stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: + | (1 + #) = 1111 +./calc.at:1363: cat stderr +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +stderr: +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ + | (1 + 1) / (1 - 1) +./calc.at:1362: $PREPARSER ./calc input +input: +stderr: +input: +./calc.at:1368: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1363: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1369: $PREPARSER ./calc input input: + | 1 + 2 * 3 + !- ++ +./calc.at:1370: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp 
(1.1-17: 2222) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | 1 + 2 * 3 + !+ ++ -./calc.at:1362: $PREPARSER ./calc input -input: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error stderr: -./calc.at:1368: $PREPARSER ./calc input +1.11-17: error: null divisor +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -73032,12 +80579,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -73047,18 +80594,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 
Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -73066,7 +80613,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -73087,12 +80634,16 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 117): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +stderr: Starting parse Entering state 0 Stack now 0 @@ -73173,162 +80724,16 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next 
token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -73409,40 +80814,23 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | - | +1 -./calc.at:1367: $PREPARSER ./calc input -stderr: +1.6: syntax error: invalid character: '#' +1.11-17: error: null divisor | 1 + 2 * 3 + !- ++ -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +./calc.at:1364: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -73452,12 +80840,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -73467,18 +80855,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -73486,7 +80874,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -73503,39 +80891,30 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: cat stderr -input: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -input: +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1371: cat stderr stderr: -./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -73545,12 +80924,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -73560,18 +80939,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -73579,7 +80958,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -73600,14 +80979,28 @@ Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Reducing stack by rule 18 (line 118): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) input: - | (* *) + (*) + (*) +./calc.at:1369: cat stderr +./calc.at:1370: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1362: $PREPARSER ./calc input +input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !- ++ +./calc.at:1363: $PREPARSER ./calc input +input: +stderr: +./calc.at:1371: $PREPARSER ./calc input +stderr: +494. calc.at:1368: ok Starting parse Entering state 0 Stack now 0 @@ -73688,214 +81081,43 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1364: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. 
- (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - - | 1 = 2 = 3 -./calc.at:1368: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +input: +./calc.at:1370: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1369: $PREPARSER ./calc input ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1367: $PREPARSER ./calc input stderr: -./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -73905,18 +81127,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -73924,7 +81146,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -73945,219 +81167,12 @@ Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Reducing stack by rule 18 (line 118): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: -stderr: -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1364: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -./calc.at:1360: cat stderr -./calc.at:1367: $PREPARSER ./calc /dev/null -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -./calc.at:1362: cat stderr -stderr: -input: -stderr: -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | - | +1 -./calc.at:1368: $PREPARSER ./calc input -stdout: Starting parse Entering state 0 Stack now 0 @@ -74202,7 +81217,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -74210,7 +81225,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -74218,979 +81233,122 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +1.14: memory exhausted +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' 
(2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): 
- $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting 
token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 
8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: 
-1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = 
nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 
0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) - | 1 + 2 * 3 + !* ++ -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1363: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1362: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -75235,7 +81393,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -75243,7 +81401,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -75251,976 +81409,85 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 
-Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a 
token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 
1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering 
state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input 
(1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) 
--> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 
12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 21 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1369: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11-17: error: null divisor +stderr: +498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... 
+./calc.at:1374: mv calc.y.tmp calc.y +1.14: memory exhausted +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1364: cat stderr +syntax error: invalid character: '#' +input: +./calc.at:1362: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: cat stderr +stderr: +./calc.at:1369: cat stderr +./calc.at:1370: cat stderr +input: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1364: $PREPARSER ./calc input +stderr: +495. calc.at:1369: input: + ok +input: stderr: + | 1 + 2 * 3 + !* ++ Starting parse Entering state 0 Stack now 0 @@ -76291,28 +81558,42 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1363: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1371: cat stderr + +./calc.at:1367: cat stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -76322,12 +81603,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -76337,18 +81618,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -76356,7 +81637,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -76377,15 +81658,13 @@ Shifting token '*' (1.14: ) Entering state 15 Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Reducing stack by rule 19 (line 119): $1 = token '!' 
(1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -76467,12 +81746,8 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -input: -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -76543,26 +81818,48 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | 1 + 2 * 3 + !* ++ + | (1 + # + 1) = 1111 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stderr: +499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... 
+stderr: +stderr: +./calc.at:1375: mv calc.y.tmp calc.y + +1.14: memory exhausted +./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -76572,12 +81869,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -76587,18 +81884,18 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -76606,7 +81903,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -76627,41 +81924,13 @@ Shifting token '*' (1.14: ) Entering state 15 Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Reducing stack by rule 19 (line 119): $1 = token '!' 
(1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 2 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -stderr: -./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -76743,24 +82012,10 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1369: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76770,20 +82025,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1370: cat stderr stderr: -stderr: -./calc.at:1368: $PREPARSER ./calc /dev/null -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76793,123 +82037,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] 
['('] ['!']) -error: 4444 != 1 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1360: cat stderr -stderr: -stderr: -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1362: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76919,102 +82048,157 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1360: $PREPARSER ./calc input +1.14: memory exhausted input: + | (1 + #) = 1111 +./calc.at:1370: $PREPARSER ./calc input +syntax error: invalid character: '#' ./calc.at:1364: cat stderr +./calc.at:1363: cat stderr +./calc.at:1362: cat stderr stderr: +./calc.at:1371: cat stderr +1.6: syntax error: invalid character: '#' +input: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +input: +input: +./calc.at:1364: $PREPARSER ./calc input | (#) + (#) = 2222 input: +./calc.at:1363: $PREPARSER ./calc input + | (#) + (#) = 2222 ./calc.at:1362: $PREPARSER ./calc input -./calc.at:1368: cat stderr -./calc.at:1367: cat stderr - | 1 2 + | (#) + (#) = 2222 +./calc.at:1371: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) 
-Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1369: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1367: cat stderr Starting parse Entering state 0 Stack now 0 @@ -77028,8 +82212,8 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) @@ -77040,7 +82224,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -77062,8 +82246,8 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) Error: popping token error (1.8: ) Stack now 0 8 21 4 Shifting token error (1.8: ) @@ -77074,7 +82258,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -77083,7 +82267,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -77095,18 +82279,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -77117,26 +82301,30 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: Starting parse Entering state 0 Stack now 0 @@ -77258,61 +82446,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -input: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: - | 1//2 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1368: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1367: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: Starting parse Entering state 0 Stack now 0 @@ -77326,8 +82459,8 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) @@ -77338,7 +82471,7 @@ Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -77360,8 +82493,8 @@ Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) Error: popping token error (1.8: ) Stack now 0 8 21 4 Shifting token error (1.8: ) @@ -77372,7 +82505,7 @@ Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -77381,7 +82514,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -77393,18 +82526,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -77415,30 +82548,33 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: Starting parse Entering state 0 @@ -77561,267 +82697,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -stderr: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1360: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: cat stderr -./calc.at:1369: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: cat stderr -input: -input: -./calc.at:1364: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1367: cat stderr -./calc.at:1360: $PREPARSER ./calc input - | 1//2 -./calc.at:1369: $PREPARSER ./calc input -stderr: - | (!!) 
+ (1 2) = 1 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -77831,110 +82706,144 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp 
(1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) - | (1 + #) = 1111 +./calc.at:1370: cat stderr input: -./calc.at:1362: $PREPARSER ./calc input stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 - | error -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1371: cat stderr input: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1371: $PREPARSER ./calc input stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1367: cat stderr +493. calc.at:1367: ok + +500. calc.at:1387: testing Calculator %glr-parser ... 
+./calc.at:1387: mv calc.y.tmp calc.y + +./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -77944,10 +82853,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1362: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1362: $PREPARSER ./calc input stderr: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1367: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -78046,6 +82956,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -78144,28 +83056,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78175,18 +83066,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: cat stderr -stderr: -./calc.at:1368: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: cat stderr +./calc.at:1363: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input +input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 + | (1 + #) = 1111 +1.6: syntax error: invalid character: '#' +./calc.at:1363: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -78197,32 +83096,29 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -78286,24 +83182,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -input: - | (#) + (#) = 2222 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1360: cat stderr -input: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -78314,17 +83192,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input - | error -stderr: -./calc.at:1369: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -78334,102 +83203,79 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is 
token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -78446,19 +83292,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1360: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: ./calc.at:1364: cat stderr stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +./calc.at:1371: cat stderr + | (# + 1) = 1111 +./calc.at:1370: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -78554,7 +83393,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: stderr: Starting parse Entering state 0 @@ -78565,102 +83403,79 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next 
token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -78677,22 +83492,20 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (* *) + (*) + (*) -./calc.at:1362: cat stderr -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: - | 1 = 2 = 3 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr +1.2: syntax error: invalid character: '#' +input: +input: + | (# + 1) = 1111 +./calc.at:1371: $PREPARSER ./calc input + | (1 + #) = 1111 ./calc.at:1364: $PREPARSER ./calc input -syntax error on 
token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78702,7 +83515,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -78712,29 +83525,32 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -78744,7 +83560,7 @@ Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -78757,18 +83573,18 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): +Reducing stack by rule 6 (line 89): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) @@ -78779,234 +83595,150 @@ Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) 
Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr stderr: +./calc.at:1363: cat stderr +1.2: syntax error: invalid character: '#' stderr: +./calc.at:1370: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | (# + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1363: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] 
(expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: -stderr: - | (* *) + (*) + (*) -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): +Reducing stack by rule 4 (line 84): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: cat stderr -stderr: -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) input: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1371: cat stderr +input: + | (# + 1) = 1111 ./calc.at:1363: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 stderr: Starting parse Entering state 0 @@ -79017,76 +83749,95 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 
11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -79103,11 +83854,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr +./calc.at:1370: $PREPARSER ./calc input stderr: stderr: -input: -./calc.at:1367: cat stderr Starting parse Entering state 0 Stack now 0 @@ -79117,32 +83866,29 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error 
(1.2-6: ) Entering state 11 @@ -79206,12 +83952,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | 1 = 2 = 3 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -79222,130 +83962,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1367: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token 1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack 
by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) | (1 + # + 1) = 1111 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1368: cat stderr -./calc.at:1364: cat stderr +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1371: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1371: $PREPARSER ./calc input stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr Starting parse Entering state 0 Stack now 0 @@ -79461,8 +84096,9 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -input: -./calc.at:1363: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79472,45 +84108,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | - | +1 -./calc.at:1364: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1370: cat stderr +./calc.at:1371: cat stderr +497. 
calc.at:1371: ok input: + | (1 + 1) / (1 - 1) +./calc.at:1370: $PREPARSER ./calc input + stderr: -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1363: cat stderr +1.11-17: error: null divisor +./calc.at:1362: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (1 + 1) / (1 - 1) +./calc.at:1362: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +stderr: Starting parse Entering state 0 Stack now 0 @@ -79535,80 +84151,107 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a 
token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -79625,20 +84268,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -stderr: -./calc.at:1369: cat stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1367: $PREPARSER ./calc input -input: +501. calc.at:1389: testing Calculator %glr-parser %header ... 
+./calc.at:1389: mv calc.y.tmp calc.y + +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (# + 1) = 1111 +./calc.at:1370: cat stderr Starting parse Entering state 0 Stack now 0 @@ -79663,80 +84299,107 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number 
(1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -79753,12 +84416,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1363: $PREPARSER ./calc input -input: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +496. calc.at:1370: ok +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79768,41 +84427,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1369: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: + +./calc.at:1362: cat stderr +490. 
calc.at:1362: ok stderr: -input: +./calc.at:1364: cat stderr Starting parse Entering state 0 Stack now 0 @@ -79812,111 +84441,113 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting 
token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +502. calc.at:1390: testing Calculator %glr-parser %locations ... +./calc.at:1390: mv calc.y.tmp calc.y + + +./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1363: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -80012,30 +84643,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1368: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -stderr: -stderr: -./calc.at:1369: cat stderr Starting parse Entering state 0 Stack now 0 @@ -80131,22 +84741,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1369: $PREPARSER ./calc /dev/null -stderr: + | (1 + # + 1) = 1111 +./calc.at:1363: $PREPARSER ./calc input stderr: -./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -80156,12 +84753,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -80171,480 +84768,96 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 
-Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: $PREPARSER ./calc /dev/null -./calc.at:1362: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 
-Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -./calc.at:1363: cat stderr -1.14: memory exhausted -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1362: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -1.14: memory exhausted -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) 
- $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): +Reducing stack by rule 1 (line 78): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1369: cat stderr -input: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 - | 1 + 2 * 3 + !* ++ -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1368: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -80655,6 +84868,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1364: cat stderr Starting parse Entering state 0 Stack now 0 @@ -80769,166 +84985,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing 
stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) input: -memory exhausted -./calc.at:1360: cat stderr -stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1368: $PREPARSER ./calc input -stderr: + | (1 + # + 1) = 1111 +./calc.at:1364: $PREPARSER ./calc input +503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... +./calc.at:1391: mv calc.y.tmp calc.y + stderr: -./calc.at:1364: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -81043,33 +85106,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -stderr: -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -489. 
calc.at:1360: input: - ok -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1364: $PREPARSER ./calc input ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -81080,11 +85116,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1362: cat stderr -./calc.at:1369: cat stderr -./calc.at:1368: cat stderr -./calc.at:1363: cat stderr +./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1363: cat stderr Starting parse Entering state 0 Stack now 0 @@ -81094,298 +85129,95 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp 
(1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 4 12 21 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = 
token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token 
'\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -81402,23 +85234,21 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr - -input: -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -490. calc.at:1362: | (1 + #) = 1111 -./calc.at:1368: $PREPARSER ./calc input - | (!!) + (1 2) = 1 - ok input: -./calc.at:1369: $PREPARSER ./calc input -stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (1 + 1) / (1 - 1) -stderr: ./calc.at:1363: $PREPARSER ./calc input +./calc.at:1364: cat stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -81428,298 +85258,122 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = 
token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '+' (1.20: ) +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) + $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -81736,14 +85390,11 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1367: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1364: $PREPARSER ./calc input stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -81885,27 +85536,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - 
s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' - -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -82048,9 +85678,7 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr -input: -./calc.at:1368: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -82061,16 +85689,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1364: $PREPARSER ./calc input -stderr: -./calc.at:1367: cat stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 stderr: -input: -./calc.at:1363: cat stderr -input: Starting parse Entering state 0 Stack now 0 @@ -82080,276 +85699,122 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 
30 +Stack now 0 4 12 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (# + 1) = 1111 -./calc.at:1368: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1369: cat stderr -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -491. calc.at:1363: ok -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token Next token is token "number" (1.16: 1) Shifting token "number" (1.16: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 88): $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + 
$1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -82366,19 +85831,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1369: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... -./calc.at:1371: mv calc.y.tmp calc.y - +./calc.at:1363: cat stderr +./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -82389,348 +85843,103 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1367: cat stderr ./calc.at:1364: cat stderr -./calc.at:1368: cat stderr +491. calc.at:1363: ok +492. calc.at:1364: ok + + +./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... +./calc.at:1392: mv calc.y.tmp calc.y + +./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1364: $PREPARSER ./calc input - | (# + 1) = 1111 -498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... -./calc.at:1374: mv calc.y.tmp calc.y +stdout: +./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... 
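Note on the traces just above: for the input (1 + 1) / (1 - 1) the parse itself succeeds, but evaluation reports "1.11-17: error: null divisor" at the divisor's source range, and the expression still reduces to the dividend ("-> $$ = nterm exp (1.1-17: 2)"). The C sketch below only mirrors that observable behaviour; the struct and helper names are assumptions, not code from the calc.at grammars, which also collapse same-line ranges to the shorter "1.11-17" form.

  /* Hedged sketch, not taken from calc.at: report a zero divisor at its
     source range and fall back to the dividend, as the traces above do. */
  #include <stdio.h>

  typedef struct
  {
    int first_line, first_column, last_line, last_column;
  } loc;                                   /* assumed location type */

  static int
  checked_div (int lhs, int rhs, loc rhs_loc)
  {
    if (rhs == 0)
      {
        fprintf (stderr, "%d.%d-%d.%d: error: null divisor\n",
                 rhs_loc.first_line, rhs_loc.first_column,
                 rhs_loc.last_line, rhs_loc.last_column);
        return lhs;                        /* keep going with a harmless value */
      }
    return lhs / rhs;
  }

  int
  main (void)
  {
    loc divisor_loc = {1, 11, 1, 17};      /* the "(1 - 1)" range seen above */
    return checked_div (2, 0, divisor_loc) == 2 ? 0 : 1;
  }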
+./calc.at:1393: mv calc.y.tmp calc.y + +./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1374: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c input: - | (1 + # + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1374: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack 
now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1369: cat stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +stdout: input: -1.6: syntax error: invalid character: '#' +./calc.at:1387: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + + | 1 2 +./calc.at:1374: $PREPARSER ./calc input stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1369: $PREPARSER ./calc input -syntax error: invalid character: '#' +syntax error +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1387: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +input: +./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 1 2 +./calc.at:1387: $PREPARSER ./calc input stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.6: syntax error: invalid character: '#' -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -82740,351 +85949,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: cat stderr -stderr: -./calc.at:1368: cat stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1364: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1369: cat stderr -input: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1368: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: cat stderr input: - | (* *) + (*) + (*) -./calc.at:1364: $PREPARSER ./calc input -499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... + | 1//2 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1387: cat stderr stderr: -./calc.at:1375: mv calc.y.tmp calc.y - -1.11-17: error: null divisor +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !+ ++ -./calc.at:1369: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -stderr: +syntax error + | 1//2 +./calc.at:1387: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83094,374 +85987,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -494. 
calc.at:1368: ./calc.at:1367: cat stderr - ok -./calc.at:1364: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1369: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1367: $PREPARSER ./calc input -stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: cat stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1364: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -error: null divisor -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: cat stderr + | error +./calc.at:1387: cat stderr +./calc.at:1374: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1367: cat stderr stderr: -1.14: memory exhausted -input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1364: $PREPARSER ./calc input -493. 
calc.at:1367: ok + | error +./calc.at:1387: $PREPARSER ./calc input +syntax error stderr: +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.14: memory exhausted -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - -./calc.at:1369: cat stderr -500. calc.at:1387: testing Calculator %glr-parser ... -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83471,18 +86026,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: mv calc.y.tmp calc.y - -stderr: stdout: -input: -./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1364: cat stderr -./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | (#) + (#) = 2222 -./calc.at:1369: $PREPARSER ./calc input -stderr: -./calc.at:1370: "$PERL" -ne ' +./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1375: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
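The stderr dumps throughout these hunks are Bison's runtime parser traces ("Starting parse", "Entering state N", "Reducing stack by rule R (line L)", "Stack now ..."). As an assumed sketch rather than the test suite's own driver: a grammar built with '%define parse.trace' (or '%debug') exports an int yydebug that, when set before calling yyparse(), sends exactly this kind of trace to stderr.

  /* Assumed driver sketch, not the calc.at test driver: link it against a
     parser generated with "%define parse.trace" (or "%debug").  Variants
     built with api.prefix or %name-prefix rename these symbols
     (e.g. calcdebug/calcparse). */
  extern int yydebug;     /* provided by the Bison-generated parser */
  int yyparse (void);     /* generated parsing routine */

  int
  main (void)
  {
    yydebug = 1;          /* enable shift/reduce tracing on stderr */
    return yyparse ();
  }

The tests then post-process that stderr, for instance stripping gcov noise with the sed -e '/^profiling:.*:Merge mismatch for summaries/d' filter that appears repeatedly above.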
@@ -83493,13 +86039,22 @@ || /\t/ )' calc.c -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1374: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1364: $PREPARSER ./calc input + | 1 = 2 = 3 input: +./calc.at:1374: $PREPARSER ./calc input +stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -83513,197 +86068,47 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: +./calc.at:1375: $PREPARSER ./calc input +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1369: cat stderr +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1387: cat stderr input: +input: +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1387: $PREPARSER ./calc input | 1 2 +./calc.at:1375: $PREPARSER ./calc input +stderr: +syntax error +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected number +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1374: cat stderr +stderr: +1.3: syntax error, unexpected number input: -./calc.at:1370: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + #) = 1111 -./calc.at:1369: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -501. calc.at:1389: testing Calculator %glr-parser %header ... -./calc.at:1389: mv calc.y.tmp calc.y - -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83713,277 +86118,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -./calc.at:1364: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1370: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1369: cat stderr -./calc.at:1364: $PREPARSER ./calc input -input: - | 1//2 -./calc.at:1370: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token 
"number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" 
(1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (# + 1) = 1111 -./calc.at:1369: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1370: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83993,242 +86128,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1364: cat stderr - | error -./calc.at:1370: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1375: cat stderr input: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: cat stderr - | (1 + #) = 1111 -./calc.at:1364: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: + | 1//2 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1387: cat stderr stderr: - | (1 + # + 1) = 1111 -./calc.at:1369: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token 
error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 = 2 = 3 -1.6: syntax error: invalid character: '#' -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1369: cat stderr -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | + | +1 +./calc.at:1387: $PREPARSER ./calc input stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84238,226 +86157,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1370: cat stderr stderr: -1.11-17: error: null divisor -./calc.at:1364: cat stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +syntax error +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1375: cat stderr +syntax error input: -1.11-17: error: null divisor - | (# + 1) = 1111 -./calc.at:1364: $PREPARSER ./calc input - | - | +1 -./calc.at:1370: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at 
end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1369: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -495. 
calc.at:1369: ok -./calc.at:1370: cat stderr -./calc.at:1370: $PREPARSER ./calc /dev/null -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84467,266 +86175,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - -./calc.at:1364: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1364: $PREPARSER ./calc input -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -502. calc.at:1390: testing Calculator %glr-parser %locations ... -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1374: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84736,311 +86186,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: mv calc.y.tmp calc.y - -./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1370: cat stderr -./calc.at:1364: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1370: $PREPARSER ./calc input -input: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1364: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering 
state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1375: $PREPARSER ./calc input stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +1.1: syntax error, unexpected invalid token +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp 
(1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected invalid token +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85050,53 +86203,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: cat stderr input: - | (- *) + (1 2) = 1 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1364: cat stderr -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -492. calc.at:1364: ok -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1370: cat stderr -./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS - -input: - | (* *) + (*) + (*) -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1370: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1370: $PREPARSER ./calc input -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1375: $PREPARSER ./calc input stderr: -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1370: $PREPARSER ./calc input +1.7: syntax error, unexpected '=' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.7: syntax error, unexpected '=' stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -./calc.at:1374: "$PERL" -ne ' +./calc.at:1391: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -85107,15 +86225,8 @@ || /\t/ )' calc.c -503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... 
-./calc.at:1370: cat stderr -./calc.at:1391: mv calc.y.tmp calc.y - -input: +./calc.at:1374: $PREPARSER ./calc /dev/null input: - | 1 + 2 * 3 + !* ++ -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -85129,35 +86240,72 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1374: $PREPARSER ./calc input -1.14: memory exhausted -stderr: +syntax error ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +./calc.at:1387: $PREPARSER ./calc /dev/null stderr: +syntax error stderr: -1.14: memory exhausted -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1370: cat stderr +stderr: +syntax error input: - | 1 2 input: -./calc.at:1374: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1370: $PREPARSER ./calc input + | 1 2 + | + | +1 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1390: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + stderr: -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +2.1: syntax error, unexpected '+' stderr: -syntax error -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1390: $PREPARSER ./calc input ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85168,28 +86316,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr -input: -./calc.at:1374: cat stderr - | (1 + #) = 1111 -./calc.at:1370: $PREPARSER ./calc input -input: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1//2 -./calc.at:1374: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' +1.3: syntax error +2.1: syntax error, unexpected '+' stderr: -syntax error -./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1370: cat stderr input: -./calc.at:1374: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85199,29 +86338,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' ./calc.at:1374: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error +./calc.at:1391: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1374: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' +1.3: syntax error +./calc.at:1375: cat stderr +./calc.at:1387: cat stderr stderr: syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: cat stderr -stderr: syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1375: $PREPARSER ./calc /dev/null +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + # + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1374: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85231,48 +86390,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -input: +./calc.at:1391: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' +1.1: syntax error, unexpected end of file +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 = 2 = 3 -./calc.at:1374: $PREPARSER ./calc input +1.3: syntax error stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: cat stderr -./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: +1.1: syntax error, unexpected end of file +./calc.at:1390: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error stderr: syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -input: -./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -======== Testing with C++ standard flags: '' -./calc.at:1371: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - - | (1 + 1) / (1 - 1) -./calc.at:1370: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1375: "$PERL" -ne ' +stdout: +./calc.at:1392: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -85283,8 +86440,16 @@ || /\t/ )' calc.c -1.11-17: error: null divisor -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85295,6 +86460,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +input: +stdout: + | 1//2 +./calc.at:1390: $PREPARSER ./calc input input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -85309,10 +86479,33 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1371: $PREPARSER ./calc input -input: +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: +1.3: syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -85326,42 +86519,11 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1375: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -stderr: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1370: cat stderr - | - | +1 -./calc.at:1374: $PREPARSER ./calc input -stderr: -input: - | 1 2 -stderr: -./calc.at:1371: $PREPARSER ./calc input -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -496. 
calc.at:1370: ./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr - ok -stderr: -stderr: -syntax error -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 2 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1375: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected number -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1375: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85371,69 +86533,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1371: cat stderr -stderr: -1.3: syntax error, unexpected number -input: - | 1//2 -./calc.at:1371: $PREPARSER ./calc input ./calc.at:1374: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1374: $PREPARSER ./calc /dev/null -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr +./calc.at:1391: cat stderr +input: stderr: -syntax error + | 1 2 +./calc.at:1392: $PREPARSER ./calc input stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error syntax error -./calc.at:1371: cat stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: input: - | 1//2 +./calc.at:1387: cat stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: + | (!!) + (1 2) = 1 +./calc.at:1374: $PREPARSER ./calc input ./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | error -./calc.at:1371: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1374: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-stdout: stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1375: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85444,45 +86572,46 @@ }eg ' expout || exit 77 input: -504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... -./calc.at:1392: mv calc.y.tmp calc.y - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1371: cat stderr -./calc.at:1375: cat stderr stderr: -./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -input: -./calc.at:1371: $PREPARSER ./calc input -stderr: +1.1: syntax error + | (!!) + (1 2) = 1 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | error -./calc.at:1375: $PREPARSER ./calc input +input: stderr: + | 1 2 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +stderr: syntax error +stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +error: 2222 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.1: syntax error +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 syntax error -error: 4444 != 1 -1.1: syntax error, unexpected invalid token -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: 2222 != 1 +./calc.at:1390: cat stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1371: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85493,7 +86622,10 @@ }eg ' expout || exit 77 input: -./calc.at:1375: "$PERL" -pi -e 'use strict; + | error +./calc.at:1390: $PREPARSER ./calc input +syntax error +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85503,34 +86635,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1374: cat stderr -./calc.at:1371: $PREPARSER ./calc input -stderr: -./calc.at:1375: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: - | 1 = 2 = 3 - | (!!) + (1 2) = 1 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: $PREPARSER ./calc input -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1371: cat stderr -./calc.at:1371: $PREPARSER ./calc /dev/null ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85541,43 +86645,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) syntax error error: 2222 != 1 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1375: cat stderr -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: -stdout: -./types.at:139: $PREPARSER ./test - | - | +1 -./calc.at:1375: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1371: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error, unexpected '+' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1371: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o 
check check.cc $LIBS -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85587,15 +86658,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1374: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr +./calc.at:1392: cat stderr +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1375: $PREPARSER ./calc input +452. types.at:139: ok +input: +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85605,18 +86680,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: cat stderr -./calc.at:1375: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error, unexpected end of file +input: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected end of file + | 1 = 2 = 3 + | 1//2 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input + ./calc.at:1374: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1375: "$PERL" -pi -e 'use strict; +1.1: syntax error +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85626,31 +86702,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (!!) + (1 2) = 1 -stderr: -./calc.at:1371: $PREPARSER ./calc input -syntax error -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error stderr: -./calc.at:1375: cat stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error -syntax error -error: 2222 != 1 -input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1375: $PREPARSER ./calc input -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1389: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85660,45 +86721,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: cat stderr input: | (- *) + (1 2) = 1 -./calc.at:1371: $PREPARSER ./calc input -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) ./calc.at:1374: $PREPARSER ./calc input stderr: stderr: syntax error syntax error -syntax error +error: 2222 != 1 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -syntax error -syntax error -syntax error -./calc.at:1371: cat stderr +input: + | 1//2 +./calc.at:1389: $PREPARSER ./calc input +1.7: syntax error ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85710,12 +86745,18 @@ }eg ' expout || exit 77 stderr: -input: - | (* *) + (*) + (*) +stderr: +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: cat stderr -./calc.at:1371: $PREPARSER ./calc input -stdout: -./calc.at:1374: "$PERL" -pi -e 'use strict; +syntax error +syntax error +error: 2222 != 1 +./calc.at:1390: cat stderr +stderr: +./calc.at:1387: cat stderr +syntax error +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85725,44 +86766,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./types.at:139: ./check input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 + | (- *) + (1 2) = 1 ./calc.at:1375: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1374: cat stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) input: - | 1 + 2 * 3 + !+ ++ +input: stderr: -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1371: cat stderr + | (- *) + (1 2) = 1 +syntax error +./calc.at:1387: $PREPARSER ./calc input stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: + | 1 = 2 = 3 +./calc.at:1390: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: $PREPARSER ./calc input +syntax error +syntax error +error: 2222 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1375: "$PERL" -pi -e 'use strict; +1.7: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +506. calc.at:1394: testing Calculator %glr-parser %verbose ... +./calc.at:1394: mv calc.y.tmp calc.y + +./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1391: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85772,25 +86805,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1375: $PREPARSER ./calc input input: + | + | +1 +./calc.at:1391: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1371: $PREPARSER ./calc input 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85801,14 +86824,14 @@ }eg ' expout || exit 77 stderr: +1.7: syntax error stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +2.1: syntax error stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85819,35 +86842,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1371: cat stderr +./calc.at:1374: cat stderr input: -stderr: | (* *) + (*) + (*) -memory exhausted -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1371: $PREPARSER ./calc input -stderr: -memory exhausted -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1389: cat stderr +./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85857,7 +86858,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1375: cat stderr +2.1: syntax error +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85867,40 +86871,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: cat stderr -stderr: -1.14: memory exhausted -./calc.at:1374: cat stderr -input: -./calc.at:1371: cat stderr -input: input: - | (#) + (#) = 2222 -./calc.at:1374: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' + | error +./calc.at:1389: $PREPARSER ./calc input +syntax error +syntax error +syntax error +input: ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ + | (* *) + (*) + (*) ./calc.at:1375: $PREPARSER ./calc input stderr: - | (#) + (#) = 2222 -./calc.at:1371: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85910,29 +86901,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | (1 + #) = 1111 -./calc.at:1371: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: cat stderr -stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1371: cat stderr -stderr: -input: - | (# + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85942,31 +86911,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1375: cat stderr -./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: cat stderr +input: stderr: -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' + | + | +1 +./calc.at:1390: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error input: -./calc.at:1371: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1375: $PREPARSER ./calc input stderr: -1.14: memory exhausted -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input stderr: + | error +./calc.at:1392: $PREPARSER ./calc input +2.1: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1387: cat stderr +./calc.at:1391: cat stderr +stderr: +syntax error +./calc.at:1391: $PREPARSER ./calc /dev/null stderr: -1.14: memory exhausted -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85976,12 +86948,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +2.1: syntax error + | (* *) + (*) + (*) +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85991,67 +86963,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1374: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -1.6: syntax error: invalid character: '#' -input: -./calc.at:1375: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: - | (# + 1) = 1111 -./calc.at:1374: $PREPARSER ./calc input -stdout: -./calc.at:1387: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1371: cat stderr -stderr: -input: -syntax error: invalid character: '#' - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (#) + (#) = 2222 -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + 1) / (1 - 1) -input: ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86062,13 +86973,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1387: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1375: cat stderr +syntax error +syntax error syntax error stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.1: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86078,25 +86994,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1375: $PREPARSER ./calc input 
./calc.at:1374: cat stderr +./calc.at:1389: cat stderr +stderr: +stderr: +1.1: syntax error +syntax error +syntax error syntax error -./calc.at:1375: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1390: cat stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.11-17: error: null divisor - | (1 + # + 1) = 1111 -./calc.at:1374: $PREPARSER ./calc input input: -stderr: -syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 + | 1 + 2 * 3 + !+ ++ +./calc.at:1390: $PREPARSER ./calc /dev/null +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86107,16 +87026,7 @@ }eg ' expout || exit 77 stderr: -./calc.at:1371: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -1.6: syntax error: invalid character: '#' -497. calc.at:1371: ok -./calc.at:1387: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86126,7 +87036,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +stderr: +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1392: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86136,34 +87062,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: cat stderr -./calc.at:1374: cat stderr -input: - - | 1//2 -./calc.at:1387: $PREPARSER ./calc input +syntax error +stderr: +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error input: + | 1 = 2 = 3 input: +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1391: cat stderr + | 1 + 2 * 3 + !- ++ stderr: - | (# + 1) = 1111 -./calc.at:1375: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + 1) / (1 - 1) -./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1387: cat stderr stderr: stderr: -syntax error -error: null divisor -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax 
error +1.1-46: error: 4444 != 1 stderr: -error: null divisor -1.2: syntax error: invalid character: '#' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1374: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86174,9 +87108,6 @@ }eg ' expout || exit 77 stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86187,9 +87118,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: cat stderr -./calc.at:1374: cat stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86199,27 +87130,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | 123 -./calc.at:1374: $PREPARSER ./calc --num input - | (1 + # + 1) = 1111 -stderr: -./calc.at:1375: $PREPARSER ./calc input -505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... -./calc.at:1393: mv calc.y.tmp calc.y - -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1387: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86229,43 +87140,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 -./calc.at:1374: $PREPARSER ./calc --num input -input: - | error -./calc.at:1387: $PREPARSER ./calc input stderr: -./calc.at:1375: cat stderr -syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: cat stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1387: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1374: cat stderr +input: + | 1 + 2 * 3 + !* ++ ./calc.at:1375: $PREPARSER ./calc input -syntax error -stderr: -1.11-17: error: null divisor -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.11-17: error: null divisor stderr: -syntax error -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error 
on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86275,19 +87169,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: cat stderr -input: -./calc.at:1387: cat stderr - | 123 +1.14: memory exhausted input: -./calc.at:1375: $PREPARSER ./calc --num input -stderr: - | 1 = 2 = 3 -./calc.at:1387: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1389: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86298,31 +87186,24 @@ }eg ' expout || exit 77 stderr: -syntax error -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1374: cat stderr +memory exhausted +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stdout: - | 1 + 2 * 3 -./calc.at:1375: $PREPARSER ./calc --num input -./calc.at:1387: cat stderr + | + | +1 +./calc.at:1389: $PREPARSER ./calc input stderr: -input: -./calc.at:1389: "$PERL" -ne ' +stdout: +./calc.at:1393: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -86331,21 +87212,9 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.c calc.h + )' calc.c -1.3: syntax error, unexpected '+', expecting end of file -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -stderr: -1.3: syntax error, unexpected '+', expecting end of file -./calc.at:1387: $PREPARSER ./calc input - | 1 + 2 * 3 -./calc.at:1374: $PREPARSER ./calc --exp input input: -stderr: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -86359,10 +87228,30 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1389: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +stderr: +1.14: memory exhausted +stderr: +stderr: +./calc.at:1391: cat stderr +stderr: syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +memory exhausted +stderr: +./calc.at:1392: cat stderr +input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | (!!) + (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input +input: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86372,38 +87261,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1375: cat stderr -syntax error -input: -stderr: -stdout: - | 1 2 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -input: -stderr: - | 1 + 2 * 3 -syntax error -./calc.at:1375: $PREPARSER ./calc --exp input -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86415,37 +87276,7 @@ ' expout || exit 77 stderr: stderr: -syntax error -stderr: -input: -498. 
calc.at:1374: ok -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1390: $PREPARSER ./calc input -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -input: -./calc.at:1387: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86455,29 +87286,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1390: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./calc.at:1387: $PREPARSER ./calc /dev/null - -stderr: -499. calc.at:1375: ok -1.3: syntax error -stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr stderr: stderr: -1.3: syntax error syntax error -input: - | 1//2 -./calc.at:1389: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1387: cat stderr stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86487,10 +87307,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86500,29 +87318,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: stderr: -./calc.at:1390: cat stderr -syntax error + | (#) + (#) = 2222 input: -./calc.at:1387: cat stderr - | 1//2 -./calc.at:1390: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1390: cat stderr + | 1 2 +./calc.at:1393: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error + | 1 + 2 * 3 + !* ++ +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1387: $PREPARSER ./calc input -1.3: syntax error +input: + | (!!) 
+ (1 2) = 1 stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +./calc.at:1390: $PREPARSER ./calc input +memory exhausted ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1374: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86532,16 +87358,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -506. calc.at:1394: testing Calculator %glr-parser %verbose ... +./calc.at:1389: cat stderr stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1394: mv calc.y.tmp calc.y - -./calc.at:1389: "$PERL" -pi -e 'use strict; +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86551,9 +87373,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1390: cat stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1389: $PREPARSER ./calc /dev/null +stderr: +stderr: +stderr: +memory exhausted +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86563,41 +87399,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | error -./calc.at:1390: $PREPARSER ./calc input -507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... -stderr: -./calc.at:1389: cat stderr -./calc.at:1387: cat stderr -1.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: mv calc.y.tmp calc.y - -input: -./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | (!!) 
+ (1 2) = 1 - | error -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input -stderr: -stderr: -syntax error -error: 2222 != 1 -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +./calc.at:1391: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1392: cat stderr stderr: -1.1: syntax error -./types.at:139: ./check -syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -error: 2222 != 1 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86607,9 +87417,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -stderr: +syntax error +input: +./calc.at:1392: $PREPARSER ./calc /dev/null + | (- *) + (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1375: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86620,16 +87436,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1390: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr -======== Testing with C++ standard flags: '' -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86639,26 +87446,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1390: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1387: $PREPARSER ./calc input stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 syntax error -syntax error -error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr stderr: -1.7: syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: + | 1//2 +./calc.at:1393: $PREPARSER ./calc input stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 syntax error -syntax error -error: 2222 != 1 -1.7: syntax error -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86668,17 +87477,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: - | 1 = 2 = 3 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1390: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -syntax 
error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86688,62 +87487,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1390: cat stderr stderr: +./calc.at:1387: cat stderr stderr: -stdout: -input: +1.6: syntax error: invalid character: '#' syntax error -stderr: - | - | +1 -stdout: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 ./calc.at:1390: $PREPARSER ./calc input -./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1391: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1387: cat stderr -2.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -input: stderr: - | (* *) + (*) + (*) +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 + | (#) + (#) = 2222 ./calc.at:1387: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -2.1: syntax error -./calc.at:1391: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: cat stderr syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1374: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86754,19 +87530,30 @@ }eg ' expout || exit 77 stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1375: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1389: $PREPARSER ./calc input +input: +./calc.at:1391: cat stderr + | (1 + #) = 1111 +./calc.at:1374: $PREPARSER ./calc input stderr: syntax error syntax error syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 -./calc.at:1391: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -1.3: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +syntax error +error: 4444 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86776,7 +87563,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1391: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86786,12 +87581,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1389: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1387: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86802,45 +87592,34 @@ }eg ' expout || exit 77 input: +./calc.at:1392: cat stderr stderr: -stdout: - | - | +1 -./calc.at:1389: $PREPARSER ./calc input + | (# + 1) = 1111 stderr: -input: -./types.at:139: ./check +./calc.at:1375: $PREPARSER ./calc input +syntax error: invalid character: '#' syntax error -./calc.at:1390: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1391: cat stderr -stderr: -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -./calc.at:1390: $PREPARSER ./calc /dev/null -input: -stderr: - | 1//2 -./calc.at:1391: $PREPARSER ./calc input -stderr: -1.1: syntax error +syntax error +syntax 
error +error: 4444 != 1 stderr: -input: -1.3: syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1392: $PREPARSER ./calc input stderr: -1.3: syntax error +1.2: syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: -1.1: syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86850,8 +87629,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1393: cat stderr +input: +./calc.at:1390: cat stderr + | error +./calc.at:1393: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +input: + | (* *) + (*) + (*) +./calc.at:1390: $PREPARSER ./calc input +stderr: +stderr: +syntax error +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86861,7 +87658,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86871,18 +87668,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1391: cat stderr -./calc.at:1389: cat stderr -./calc.at:1390: cat stderr -input: -./calc.at:1389: $PREPARSER ./calc /dev/null - | error -./calc.at:1391: $PREPARSER ./calc input -input: -stderr: -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86892,31 +87684,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1390: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -1.1: syntax error -stderr: -syntax error 1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +1.10: syntax error +1.16: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1387: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86926,11 +87699,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +syntax error +./calc.at:1387: cat stderr input: - | 1 + 2 * 3 + !* ++ + | (1 + #) = 1111 ./calc.at:1387: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1389: cat stderr +./calc.at:1374: cat stderr +./calc.at:1375: cat stderr +./calc.at:1391: cat stderr +stderr: +input: +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error + | (!!) + (1 2) = 1 +./calc.at:1389: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86940,7 +87729,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86950,59 +87739,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -memory exhausted -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: cat stderr -stderr: -./calc.at:1390: cat stderr -memory exhausted -./calc.at:1389: cat stderr -input: input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (!!) + (1 2) = 1 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input input: -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error +stderr: + | (1 + # + 1) = 1111 syntax error -error: 4444 != 1 +error: 2222 != 1 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 +syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input stderr: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1391: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 stderr: -1.7: syntax error +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr stderr: -stdout: ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87013,12 +87773,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: ./check +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error +error: 2222 != 1 +./calc.at:1392: cat stderr +./calc.at:1393: cat stderr stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.6: syntax error: invalid character: '#' input: -1.7: syntax error 
-./calc.at:1389: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1391: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1390: cat stderr +stderr: +input: +stderr: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87028,10 +87802,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1387: $PREPARSER ./calc input -stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87041,20 +87812,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: -./calc.at:1391: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1390: cat stderr -./calc.at:1389: $PREPARSER ./calc input -input: -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87064,35 +87827,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1391: $PREPARSER ./calc input -input: +./calc.at:1393: $PREPARSER ./calc input stderr: +stderr: +input: syntax error error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (- *) + (1 2) = 1 + | 1 + 2 * 3 + !+ ++ ./calc.at:1390: $PREPARSER ./calc input -stderr: -2.1: syntax error -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr +./calc.at:1375: cat stderr syntax error -error: 2222 != 1 -stderr: -stderr: -2.1: syntax error -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87102,7 +87849,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1389: cat stderr +stderr: +./calc.at:1374: cat stderr +stderr: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87112,7 +87863,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1375: $PREPARSER ./calc input +stderr: + | (- *) + (1 2) = 1 +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87122,26 +87882,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || 
exit 77 +./calc.at:1387: cat stderr +stderr: input: - | (1 + #) = 1111 -./calc.at:1389: cat stderr -./calc.at:1387: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input +1.11-17: error: null divisor +input: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ ./calc.at:1391: cat stderr -./calc.at:1390: cat stderr stderr: -syntax error: invalid character: '#' -input: stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1391: $PREPARSER ./calc /dev/null -input: - | (- *) + (1 2) = 1 -./calc.at:1389: $PREPARSER ./calc input -stdout: - | (* *) + (*) + (*) ./calc.at:1390: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -ne ' +stderr: +syntax error: invalid character: '#' +stdout: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -87154,26 +87926,47 @@ stderr: stderr: -syntax error: invalid character: '#' +input: +1.11-17: error: null divisor +stderr: + | (# + 1) = 1111 +./calc.at:1392: cat stderr +./calc.at:1387: $PREPARSER ./calc input stderr: +input: syntax error syntax error error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1391: $PREPARSER ./calc input +syntax error: invalid character: '#' stderr: +syntax error: invalid character: '#' +stderr: +./calc.at:1393: cat stderr +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +stderr: + | (- *) + (1 2) = 1 +syntax error: invalid character: '#' stderr: +./calc.at:1375: cat stderr input: -1.2: syntax error -1.10: syntax error -1.16: syntax error -1.1: syntax error | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -87187,14 +87980,20 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1394: $PREPARSER ./calc input +1.14: memory exhausted +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1392: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87204,7 +88003,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1393: $PREPARSER ./calc input +syntax error +syntax error +error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 123 +stderr: +stderr: +./calc.at:1375: $PREPARSER ./calc --num input +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87214,7 +88028,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87225,8 +88040,11 @@ }eg ' expout || exit 77 stderr: -./calc.at:1387: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87236,35 +88054,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 2 -./calc.at:1392: $PREPARSER ./calc input -input: +./calc.at:1389: cat stderr ./calc.at:1390: cat stderr + | 1 2 +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1391: cat stderr - | (# + 1) = 1111 -./calc.at:1387: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -input: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1389: cat stderr -stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: +input: +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr | (* *) + (*) + (*) + | 1 + 2 * 3 + !* ++ +./calc.at:1374: cat stderr ./calc.at:1389: $PREPARSER ./calc input -input: ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87275,20 +88084,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ ./calc.at:1390: $PREPARSER ./calc input stderr: syntax error -syntax error -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr stderr: +1.14: memory exhausted +input: syntax error syntax error syntax error 
-./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 +./calc.at:1375: $PREPARSER ./calc --num input +input: +stderr: +stderr: +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87299,13 +88115,24 @@ }eg ' expout || exit 77 input: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1391: $PREPARSER ./calc input +syntax error +syntax error +syntax error + | (1 + 1) / (1 - 1) +./calc.at:1374: $PREPARSER ./calc input +1.14: memory exhausted +1.3: syntax error, unexpected '+', expecting end of file ./calc.at:1392: cat stderr -./calc.at:1387: cat stderr +input: + | (1 + # + 1) = 1111 + | (#) + (#) = 2222 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +error: null divisor +stderr: +stderr: +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87315,7 +88142,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87325,47 +88159,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -input: - | (1 + # + 1) = 1111 -stderr: - | 1//2 + | (* *) + (*) + (*) ./calc.at:1392: $PREPARSER ./calc input -./calc.at:1387: $PREPARSER ./calc input +./calc.at:1393: cat stderr stderr: -./calc.at:1389: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1390: cat stderr +stderr: +error: null divisor +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: stderr: -syntax error: invalid character: '#' -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1390: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1389: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: syntax error +syntax error +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '+', expecting end of file +./calc.at:1394: cat stderr +./calc.at:1393: $PREPARSER ./calc /dev/null stderr: -1.14: memory exhausted stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87376,8 +88206,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1390: cat stderr stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1389: cat stderr +./calc.at:1394: $PREPARSER ./calc input +syntax error +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87387,10 +88223,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -1.14: memory exhausted -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87400,11 +88233,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1387: cat stderr stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87414,41 +88247,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: cat stderr + | 1 + 2 * 3 + !+ ++ input: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1387: $PREPARSER ./calc input +./calc.at:1391: cat stderr +./calc.at:1375: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: $PREPARSER ./calc input stderr: + | (#) + (#) = 2222 +syntax error +./calc.at:1390: $PREPARSER ./calc input stderr: -error: null divisor -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1390: cat stderr input: - | error -./calc.at:1392: $PREPARSER ./calc input -stderr: - | (!!) + (1 2) = 1 -error: null divisor +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1 + 2 * 3 + | (1 + #) = 1111 +./calc.at:1375: $PREPARSER ./calc --exp input ./calc.at:1391: $PREPARSER ./calc input +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1387: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1392: cat stderr +input: +1.6: syntax error: invalid character: '#' input: -1.11: syntax error -1.1-16: error: 2222 != 1 ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1390: $PREPARSER ./calc input stderr: stderr: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1389: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ + | 123 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $PREPARSER ./calc --num input +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87458,9 +88316,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1387: "$PERL" -pi -e 'use strict; +stderr: +1.6: syntax error: invalid character: '#' +stderr: +input: + | (1 + 1) / (1 - 1) +stderr: +input: +stderr: +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87470,11 +88339,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1393: cat stderr +./calc.at:1392: $PREPARSER ./calc input +error: null divisor +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87485,8 +88356,41 @@ : "syntax error, 
unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1394: cat stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +input: +./calc.at:1390: cat stderr +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1391: cat stderr +input: + | error +./calc.at:1394: $PREPARSER ./calc input + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --num input +499. calc.at:1375: ok +input: +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1392: $PREPARSER ./calc input +syntax error +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87496,8 +88400,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +input: +stderr: +input: + | (1 + #) = 1111 +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87507,41 +88414,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1392: cat stderr -500. calc.at:1387: ok stderr: -memory exhausted -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1390: cat stderr -input: - | (- *) + (1 2) = 1 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | (# + 1) = 1111 +stderr: ./calc.at:1391: $PREPARSER ./calc input stderr: -memory exhausted +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.6: syntax error: invalid character: '#' stderr: - | 1 = 2 = 3 -input: -./calc.at:1392: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1390: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1389: cat stderr 1.6: syntax error: invalid character: '#' +./calc.at:1387: cat stderr stderr: -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87551,19 +88453,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -stdout: -./types.at:139: $PREPARSER ./test -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -stderr: -1.6: 
syntax error: invalid character: '#' -./calc.at:1389: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87573,7 +88464,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1389: $PREPARSER ./calc input ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87584,21 +88476,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -input: - | (#) + (#) = 2222 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1392: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -input: -./calc.at:1390: "$PERL" -pi -e 'use strict; +500. calc.at:1387: ok +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87608,28 +88488,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1391: $PREPARSER ./calc input -stderr: - | - | +1 -./calc.at:1392: $PREPARSER ./calc input -stdout: -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +memory exhausted +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87639,17 +88499,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -======== Testing with C++ standard flags: '' - | (# + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87660,11 +88511,24 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1392: cat stderr +./calc.at:1394: cat stderr +memory exhausted +./calc.at:1390: cat stderr +./calc.at:1391: cat stderr +./calc.at:1374: cat stderr +input: + | 1 = 2 = 3 + +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1393: cat stderr +input: +stderr: +input: + | (# + 1) = 1111 +./calc.at:1390: $PREPARSER ./calc input +input: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87674,32 +88538,50 @@ : 
"syntax error, unexpected $unexp"; }eg ' expout || exit 77 -508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... -./calc.at:1397: mv calc.y.tmp calc.y - -./calc.at:1391: cat stderr -./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1391: $PREPARSER ./calc input +stderr: +input: +stderr: +1.6: syntax error: invalid character: '#' + | (!!) + (1 2) = 1 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1392: cat stderr -./calc.at:1392: $PREPARSER ./calc /dev/null - | (1 + #) = 1111 -./calc.at:1389: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 +./calc.at:1393: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1392: $PREPARSER ./calc input +syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1374: $PREPARSER ./calc --exp input stderr: syntax error +error: 2222 != 1 +stderr: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1391: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted stderr: syntax error +error: 2222 != 1 stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1389: cat stderr +507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... 
+./calc.at:1395: mv calc.y.tmp calc.y + ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87710,12 +88592,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87725,8 +88602,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87736,11 +88613,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1390: cat stderr -./calc.at:1389: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87751,34 +88624,64 @@ }eg ' expout || exit 77 input: -input: - | (1 + # + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input - | (# + 1) = 1111 + | (#) + (#) = 2222 ./calc.at:1389: $PREPARSER ./calc input +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stderr: -stderr: -stdout: -./calc.at:1392: cat stderr -1.6: syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1394: cat stderr ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check +498. 
calc.at:1374: ./calc.at:1390: cat stderr + ok ./calc.at:1391: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +./calc.at:1392: cat stderr + | + | +1 +./calc.at:1394: $PREPARSER ./calc input +input: +./calc.at:1393: cat stderr +input: +stderr: + | (1 + # + 1) = 1111 + | (1 + 1) / (1 - 1) +syntax error +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1391: $PREPARSER ./calc input + | (#) + (#) = 2222 stderr: stderr: +./calc.at:1392: $PREPARSER ./calc input input: +1.11-17: error: null divisor 1.6: syntax error: invalid character: '#' +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + + | (- *) + (1 2) = 1 syntax error: invalid character: '#' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1392: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +stderr: +syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87788,24 +88691,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... stderr: +stderr: +./calc.at:1397: mv calc.y.tmp calc.y + syntax error syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: 2222 != 1 +1.6: syntax error: invalid character: '#' stderr: -1.14: memory exhausted +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +1.11-17: error: null divisor +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error syntax error syntax error -error: 4444 != 1 -./calc.at:1390: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +error: 2222 != 1 +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87815,21 +88721,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -stderr: -stdout: - | (1 + 1) / (1 - 1) -./types.at:139: ./check -./calc.at:1390: $PREPARSER ./calc input -1.14: memory exhausted -./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./calc.at:1389: cat stderr -1.11-17: error: null divisor -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87839,8 +88731,7 @@ : 
"syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87850,14 +88741,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -input: - | (1 + # + 1) = 1111 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1392: cat stderr -./calc.at:1391: cat stderr -stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87867,55 +88751,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1391: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1389: cat stderr ./calc.at:1390: cat stderr -syntax error: invalid character: '#' -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -error: 2222 != 1 -502. calc.at:1390: ok -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1391: cat stderr +./calc.at:1394: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87925,80 +88765,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1389: cat stderr -./calc.at:1391: cat stderr -./calc.at:1392: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1391: $PREPARSER ./calc input input: input: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + 1) / (1 - 1) -./calc.at:1389: $PREPARSER ./calc input +./calc.at:1392: cat stderr +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc /dev/null stderr: - | (- *) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input -error: null divisor -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1389: $PREPARSER ./calc input +503. 
calc.at:1391: ok stderr: -1.6: syntax error: invalid character: '#' stderr: syntax error -syntax error -error: 2222 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +syntax error: invalid character: '#' +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -error: null divisor -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error + | (1 + #) = 1111 stderr: -stdout: -./calc.at:1391: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test +./calc.at:1393: cat stderr +./calc.at:1392: $PREPARSER ./calc input +1.11-17: error: null divisor stderr: -syntax error -syntax error -error: 2222 != 1 -509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... +syntax error: invalid character: '#' stderr: -input: - | (# + 1) = 1111 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1398: mv calc.y.tmp calc.y +syntax error: invalid character: '#' +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -501. calc.at:1389: ok -======== Testing with C++ standard flags: '' +input: stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88008,8 +88809,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1391: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' + | (* *) + (*) + (*) +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88019,90 +88822,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1392: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1392: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error syntax error syntax error -./calc.at:1391: cat stderr -input: -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1392: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... -1.6: syntax error: invalid character: '#' -./calc.at:1400: mv calc.y.tmp calc.y - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1391: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1392: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./types.at:139: ./check -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88112,20 +88836,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1391: cat stderr -./calc.at:1392: cat stderr -503. calc.at:1391: ok -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1392: $PREPARSER ./calc input +./calc.at:1390: cat stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... 
+./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1398: mv calc.y.tmp calc.y stderr: -memory exhausted -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +syntax error +syntax error +syntax error +./calc.at:1394: cat stderr ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88136,89 +88857,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./calc.at:1392: cat stderr -./calc.at:1393: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: -stdout: -./types.at:139: $PREPARSER ./test -input: -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -input: - | (#) + (#) = 2222 -./calc.at:1392: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -======== Testing with C++ standard flags: '' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +502. calc.at:1390: ok input: - | 1 2 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1389: cat stderr +./calc.at:1394: $PREPARSER ./calc input stderr: syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... 
-./calc.at:1401: mv calc.y.tmp calc.y - -stderr: syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1392: cat stderr -./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | (1 + #) = 1111 -./calc.at:1392: $PREPARSER ./calc input ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88229,49 +88881,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1393: cat stderr -stderr: -stdout: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./types.at:139: $PREPARSER ./test - | 1//2 -./calc.at:1393: $PREPARSER ./calc input stderr: syntax error -stderr: -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1392: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: +syntax error +syntax error +syntax error +error: 4444 != 1 | (# + 1) = 1111 + ./calc.at:1392: $PREPARSER ./calc input stderr: stderr: syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1393: cat stderr syntax error: invalid character: '#' -syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./calc.at:1392: "$PERL" -pi -e 'use strict; +input: +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88281,52 +88916,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: ./check stderr: stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: cat stderr -./calc.at:1393: cat stderr -input: -./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | (1 + # + 1) = 1111 -./calc.at:1392: $PREPARSER ./calc input -stderr: -input: +./types.at:139: $PREPARSER ./test syntax error: invalid character: '#' - | error + | 1 + 2 * 3 + !+ ++ ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -syntax error: invalid character: '#' -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88337,29 +88933,14 @@ }eg ' expout || exit 77 stderr: -./calc.at:1393: cat stderr -stdout: -./calc.at:1392: cat stderr -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | 1 = 2 = 3 -input: -./calc.at:1393: $PREPARSER ./calc input -stderr: -syntax error ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1392: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -error: null divisor -syntax error +./calc.at:1394: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: cat stderr +input: + | 1 + 2 * 3 + !- ++ ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88370,136 +88951,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1392: cat stderr -./calc.at:1393: cat stderr -504. calc.at:1392: ok -input: - | - | +1 ./calc.at:1393: $PREPARSER ./calc input +input: +510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... stderr: -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -stderr: -syntax error -stderr: -stdout: -./calc.at:1394: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c +./calc.at:1400: mv calc.y.tmp calc.y -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | (!!) + (1 2) = 1 ./calc.at:1394: $PREPARSER ./calc input -./calc.at:1393: cat stderr -stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $PREPARSER ./calc /dev/null -stderr: -stderr: -syntax error ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -syntax error - | 1 2 -./calc.at:1394: $PREPARSER ./calc input stderr: syntax error +error: 2222 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1393: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... -./calc.at:1402: mv calc.y.tmp calc.y - -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1393: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1394: cat stderr +./calc.at:1392: cat stderr stderr: syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +error: 2222 != 1 input: - | 1//2 -./calc.at:1394: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1392: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88520,59 +89002,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: cat stderr -input: -./calc.at:1394: cat stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1393: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -input: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error stderr: +syntax error: invalid character: '#' stderr: -./calc.at:1394: $PREPARSER ./calc input -stdout: -syntax error -error: 2222 != 1 -stderr: -./calc.at:1395: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c +syntax error: invalid character: '#' +511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... +./calc.at:1401: mv calc.y.tmp calc.y -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -input: -stdout: -./types.at:139: $PREPARSER ./test - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1395: $PREPARSER ./calc input -stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1394: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88582,12 +89021,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1393: cat stderr +./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88598,123 +89034,41 @@ }eg ' expout || exit 77 input: - | 1 2 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1393: cat stderr -stderr: -syntax error, unexpected number -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: | (- *) + (1 2) = 1 +input: +./calc.at:1394: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ ./calc.at:1393: $PREPARSER ./calc input -syntax error, unexpected number -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: cat stderr +./calc.at:1389: cat stderr stderr: syntax error syntax error error: 2222 != 1 -./calc.at:1394: cat stderr -./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1395: cat stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: -syntax error +memory exhausted ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1//2 -./calc.at:1395: $PREPARSER ./calc input -syntax error -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: cat stderr stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: - | (* *) + (*) + (*) -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: cat stderr -./calc.at:1394: cat stderr input: -stderr: -syntax error -syntax error +memory exhausted syntax error -input: -stderr: - | - | +1 -./calc.at:1394: $PREPARSER ./calc input - | error -stdout: -stderr: -./calc.at:1395: $PREPARSER ./calc input syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -syntax error, unexpected invalid token -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: 2222 != 1 + | (1 + 1) / (1 - 1) +./calc.at:1389: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1392: $PREPARSER ./calc input stderr: stderr: -syntax error +451. 
types.at:139: ok +error: null divisor +error: null divisor +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected invalid token -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88725,7 +89079,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88736,11 +89089,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +error: null divisor stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: ./check -./calc.at:1395: "$PERL" -pi -e 'use strict; +error: null divisor +./calc.at:1393: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88750,59 +89103,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1394: cat stderr -./calc.at:1395: cat stderr input: - | 1 + 2 * 3 + !+ ++ + + | (#) + (#) = 2222 ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1394: $PREPARSER ./calc /dev/null -stderr: -input: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -stderr: -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '=' -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: input: -syntax error, unexpected '=' - | 1 + 2 * 3 + !- ++ -./calc.at:1393: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr -./calc.at:1394: cat stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88812,51 +89119,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | - | +1 -./calc.at:1395: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) ./calc.at:1394: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: cat stderr +./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS syntax error syntax error syntax error -syntax error -error: 4444 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1393: cat stderr -syntax error, unexpected '+' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -input: +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: +./calc.at:1389: cat stderr syntax error syntax error syntax error -syntax error -error: 4444 != 1 - | 1 + 2 * 3 + !* ++ -./calc.at:1393: $PREPARSER ./calc input -stderr: -memory exhausted -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +504. calc.at:1392: ok ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88867,11 +89152,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -./calc.at:1394: cat stderr -./calc.at:1395: $PREPARSER ./calc /dev/null -stderr: -syntax error, unexpected end of input ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88882,80 +89162,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +501. calc.at:1389: ok +./calc.at:1394: cat stderr + input: - | (!!) 
+ (1 2) = 1 -stdout: + | 1 + 2 * 3 + !+ ++ ./calc.at:1394: $PREPARSER ./calc input +./calc.at:1393: cat stderr stderr: -./types.at:139: ./check -stderr: -syntax error, unexpected end of input -syntax error -error: 2222 != 1 + ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -syntax error -error: 2222 != 1 -./calc.at:1393: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: - | (#) + (#) = 2222 +stderr: + | (1 + #) = 1111 ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... input: -./calc.at:1394: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1395: $PREPARSER ./calc input -syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1402: mv calc.y.tmp calc.y + syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1394: $PREPARSER ./calc input +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +syntax error: invalid character: '#' stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88966,21 +89201,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1393: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88991,46 +89211,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -input: +./calc.at:1393: cat stderr +513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... +./calc.at:1403: mv calc.y.tmp calc.y + +514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +./calc.at:1405: mv calc.y.tmp calc.y + input: - | (!!) + (1 2) = 1 - | (1 + #) = 1111 +./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1394: cat stderr + | (# + 1) = 1111 ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1395: $PREPARSER ./calc input -stderr: stderr: syntax error: invalid character: '#' ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr -stderr: -syntax error: invalid character: '#' -stderr: -syntax error, unexpected number -error: 2222 != 1 +./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | (* *) + (*) + (*) + | 1 + 2 * 3 + !* ++ ./calc.at:1394: $PREPARSER ./calc input -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error -syntax error -syntax error +syntax error: invalid character: '#' +stderr: +memory exhausted ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: cat stderr +stderr: +memory exhausted ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89041,22 +89247,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -input: - | (- *) + (1 2) = 1 -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89068,67 +89258,26 @@ }eg ' expout || exit 77 ./calc.at:1393: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1393: $PREPARSER ./calc input ./calc.at:1394: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -syntax error: invalid character: '#' - | 1 + 2 * 3 + !+ ++ -./calc.at:1394: $PREPARSER ./calc input -stderr: -./calc.at:1395: cat stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1395: $PREPARSER ./calc input -stderr: -./calc.at:1393: cat stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1394: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (#) + (#) = 2222 | (1 + # + 1) = 1111 +./calc.at:1394: $PREPARSER ./calc input ./calc.at:1393: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: syntax error: invalid character: '#' ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89139,22 +89288,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1395: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1394: cat stderr ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89165,61 +89299,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr -./calc.at:1393: cat stderr -stderr: -input: - | 1 + 2 * 3 + !- ++ +./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS input: -./calc.at:1395: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ + | (1 + #) = 1111 ./calc.at:1394: $PREPARSER ./calc input stderr: +./calc.at:1393: cat stderr +syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: +syntax error: invalid character: '#' input: -memory exhausted -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: | (1 + 1) / (1 - 1) ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: -memory exhausted -stderr: -stderr: -error: null divisor -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -error: null divisor -./calc.at:1397: $PREPARSER ./calc input ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89230,21 +89324,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +error: null divisor +./calc.at:1394: cat stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: cat stderr +error: null divisor input: + | (# + 1) = 1111 +./calc.at:1394: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89255,24 +89345,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1397: $PREPARSER ./calc input -input: -stderr: -1.3: syntax error - | 1 + 2 * 3 + !* ++ -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1394: cat stderr -stderr: -memory exhausted stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' ./calc.at:1393: cat stderr -1.3: syntax error -stderr: -memory exhausted -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89282,51 +89358,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: 505. calc.at:1393: ok - | (#) + (#) = 2222 +./calc.at:1394: cat stderr + +input: + | (1 + # + 1) = 1111 ./calc.at:1394: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: -./calc.at:1395: cat stderr - | 1//2 syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1397: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -1.3: syntax error -input: - | (#) + (#) = 2222 -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89337,47 +89379,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1394: cat stderr -513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... -./calc.at:1403: mv calc.y.tmp calc.y +515. calc.at:1407: testing Calculator %glr-parser %debug ... 
+./calc.at:1407: mv calc.y.tmp calc.y -./calc.at:1397: cat stderr +./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | (1 + #) = 1111 + | (1 + 1) / (1 - 1) ./calc.at:1394: $PREPARSER ./calc input -./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | error stderr: -./calc.at:1397: $PREPARSER ./calc input -syntax error: invalid character: '#' +error: null divisor ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error -stderr: -syntax error: invalid character: '#' -./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1395: cat stderr -1.1: syntax error +error: null divisor ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89388,23 +89402,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: cat stderr +506. calc.at:1394: ok + +./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +./calc.at:1408: mv calc.y.tmp calc.y + +./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: stdout: ./calc.at:1398: "$PERL" -ne ' @@ -89419,12 +89425,6 @@ )' calc.c input: -stderr: -syntax error: invalid character: '#' - | (# + 1) = 1111 -./calc.at:1397: cat stderr -./calc.at:1394: $PREPARSER ./calc input -input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -89440,13 +89440,16 @@ | (2^2)^3 = 64 ./calc.at:1398: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 2 +./calc.at:1398: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' stderr: stdout: -./calc.at:1400: "$PERL" -ne ' +1.3: syntax error, unexpected number +./calc.at:1395: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -89455,14 +89458,10 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.c calc.h + )' calc.c -input: - | 1 = 2 = 3 -./calc.at:1397: $PREPARSER ./calc input +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -89477,21 +89476,13 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr +1.3: syntax error, unexpected number +./calc.at:1395: $PREPARSER ./calc input stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89501,56 +89492,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | (# + 1) = 1111 + | 1 2 ./calc.at:1395: $PREPARSER ./calc input -1.7: syntax error -input: stderr: -syntax error: invalid character: '#' +syntax error, unexpected number ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 +./calc.at:1398: cat stderr stderr: -./calc.at:1400: $PREPARSER ./calc input +syntax error, unexpected number input: - | 1 2 + | 1//2 ./calc.at:1398: $PREPARSER ./calc input -./calc.at:1394: cat stderr -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1394: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' stderr: -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.3: syntax error, unexpected number +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error: invalid character: '#' -1.3: syntax error, unexpected number -./calc.at:1400: cat stderr ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89561,24 +89516,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1//2 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1395: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89589,48 +89530,34 @@ }eg ' expout || exit 77 input: -./calc.at:1398: cat stderr -stderr: - | (1 + # + 1) = 1111 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) + | 1//2 ./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: cat stderr -input: stderr: - | 1//2 -./calc.at:1394: cat stderr -./calc.at:1398: $PREPARSER ./calc input -syntax error: invalid character: '#' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr stderr: -input: - | - | +1 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1400: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -stderr: -2.1: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error, unexpected '/', expecting number or '-' or '(' or '!' input: stderr: -input: - | (1 + 1) / (1 - 1) -./calc.at:1394: $PREPARSER ./calc input | error -./calc.at:1400: $PREPARSER ./calc input -2.1: syntax error -stderr: -error: null divisor +./calc.at:1398: $PREPARSER ./calc input +stdout: stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +1.1: syntax error, unexpected invalid token +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89640,7 +89567,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1395: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: + | error +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +stderr: +syntax error, unexpected invalid token ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89651,8 +89600,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +syntax error, unexpected invalid token +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1398: cat stderr +input: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89663,51 +89618,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -error: null divisor -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1397: cat stderr -./calc.at:1400: cat stderr + | 1 2 +./calc.at:1397: $PREPARSER ./calc input ./calc.at:1395: cat stderr -./calc.at:1398: cat stderr -./calc.at:1397: $PREPARSER ./calc /dev/null -./calc.at:1394: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: - | (1 + 1) / (1 - 1) -1.1: syntax error -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: input: -stderr: - | error -./calc.at:1398: $PREPARSER ./calc input -error: null divisor +1.3: syntax error | 1 = 2 = 3 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1400: $PREPARSER ./calc input -1.1: syntax error stderr: -1.1: syntax error, unexpected invalid token +1.7: syntax error, unexpected '=' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error stderr: -error: null divisor -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr -1.1: syntax error, unexpected invalid token +input: +1.7: syntax error, unexpected '=' + | 1 = 2 = 3 ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89718,8 +89646,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1395: $PREPARSER ./calc input stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +syntax error, unexpected '=' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89730,10 +89660,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -506. calc.at:1394: ok -./calc.at:1400: cat stderr ./calc.at:1397: cat stderr +stderr: +syntax error, unexpected '=' ./calc.at:1398: cat stderr +input: + | + | +1 +./calc.at:1398: $PREPARSER ./calc input +stderr: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89745,66 +89680,23 @@ }eg ' expout || exit 77 input: -input: - | - | +1 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1395: cat stderr - | 1 = 2 = 3 -input: -./calc.at:1398: $PREPARSER ./calc input - -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1//2 +2.1: syntax error, unexpected '+' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1397: $PREPARSER ./calc input +./calc.at:1395: cat stderr stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -507. 
calc.at:1395: stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -1.7: syntax error, unexpected '=' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error +2.1: syntax error, unexpected '+' ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -1.7: syntax error, unexpected '=' -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1400: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: $PREPARSER ./calc /dev/null -stderr: +input: stdout: - + | + | +1 +stderr: +./calc.at:1395: $PREPARSER ./calc input ./calc.at:1401: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -89816,18 +89708,23 @@ || /\t/ )' calc.c calc.h +1.3: syntax error +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1398: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr -input: - | - | +1 -./calc.at:1398: $PREPARSER ./calc input +syntax error, unexpected '+' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1398: cat stderr +syntax error, unexpected '+' input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -89843,47 +89740,27 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1401: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1397: $PREPARSER ./calc input +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1398: $PREPARSER ./calc /dev/null stderr: -./calc.at:1400: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error, unexpected '+' -input: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.1: syntax error, unexpected end of input ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1400: $PREPARSER ./calc input -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr input: -2.1: syntax error, unexpected '+' | 1 2 -./calc.at:1401: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89893,8 +89770,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1401: $PREPARSER ./calc input stderr: +stderr: +input: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + | error +./calc.at:1397: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1400: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +stderr: +./calc.at:1395: cat stderr +stderr: +1.1: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1395: $PREPARSER ./calc /dev/null ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89905,55 +89809,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... 
-./calc.at:1405: mv calc.y.tmp calc.y - -./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1398: cat stderr -./calc.at:1401: cat stderr -./calc.at:1400: cat stderr -./calc.at:1398: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1397: cat stderr -input: - | 1//2 -./calc.at:1401: $PREPARSER ./calc input -input: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input input: - | (- *) + (1 2) = 1 -./calc.at:1397: $PREPARSER ./calc input +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -515. calc.at:1407: testing Calculator %glr-parser %debug ... -./calc.at:1407: mv calc.y.tmp calc.y - - | (!!) + (1 2) = 1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +1.1: syntax error ./calc.at:1400: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1401: cat stderr stderr: -./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1400: cat stderr +syntax error, unexpected end of input +./calc.at:1398: cat stderr stderr: -1.1: syntax error, unexpected end of input +input: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89964,11 +89847,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr + | 1//2 input: - | (- *) + (1 2) = 1 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1401: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1398: $PREPARSER ./calc input +input: +stderr: +stderr: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89978,59 +89865,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: - | error -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1400: $PREPARSER ./calc input +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) 
./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1398: cat stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | (* *) + (*) + (*) -./calc.at:1397: $PREPARSER ./calc input -stderr: -stderr: -input: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1398: $PREPARSER ./calc input -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -./calc.at:1401: cat stderr ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1397: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr +stderr: +stderr: stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -./calc.at:1400: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -input: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) input: - | (* *) + (*) + (*) - | 1 = 2 = 3 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1395: $PREPARSER ./calc input +stderr: ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90041,10 +89904,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +input: +./calc.at:1401: cat stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1397: $PREPARSER ./calc input +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 +./calc.at:1398: cat stderr +stderr: +./calc.at:1400: cat stderr +input: +1.7: syntax error + | error ./calc.at:1401: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1//2 ./calc.at:1400: $PREPARSER ./calc input stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90054,68 +89946,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1398: cat stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr -stderr: -input: - | (!!) + (1 2) = 1 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: -./calc.at:1401: cat stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | 1 + 2 * 3 + !+ ++ -./calc.at:1397: $PREPARSER ./calc input stderr: +1.7: syntax error stderr: -input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1400: cat stderr -./calc.at:1401: $PREPARSER ./calc input -stderr: +./calc.at:1395: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1398: $PREPARSER ./calc input +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: cat stderr 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1400: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ stderr: -./calc.at:1397: $PREPARSER ./calc input -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | (!!) + (1 2) = 1 +./calc.at:1400: cat stderr +./calc.at:1395: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1401: $PREPARSER ./calc input stderr: +syntax error, unexpected number +error: 2222 != 1 stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1401: cat stderr - | 1 + 2 * 3 + !- ++ +stderr: + | error ./calc.at:1400: $PREPARSER ./calc input stderr: -./calc.at:1401: $PREPARSER ./calc /dev/null +syntax error, unexpected number +error: 2222 != 1 +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1397: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90125,26 +90012,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: cat stderr -./calc.at:1397: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1401: cat stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1400: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1397: $PREPARSER ./calc input -stderr: -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90154,43 +90024,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1401: $PREPARSER ./calc input -1.14: memory exhausted -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1400: cat stderr ./calc.at:1398: cat stderr -1.14: memory exhausted -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr -stderr: +input: + | + | +1 + | + | +1 +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1395: cat stderr stderr: -1.14: memory exhausted -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +input: stderr: -1.14: memory exhausted +2.1: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 = 2 = 3 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: $PREPARSER ./calc input | (- *) + (1 2) = 1 ./calc.at:1398: $PREPARSER ./calc input -./calc.at:1401: cat stderr stderr: -./calc.at:1400: cat stderr +input: + | (- *) + (1 2) = 1 +stderr: +2.1: syntax error +stderr: +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: $PREPARSER ./calc input +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -input: ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90200,23 +90087,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -stderr: -./calc.at:1400: $PREPARSER ./calc input -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1401: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90226,43 +90098,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1397: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -input: +./calc.at:1401: $PREPARSER ./calc /dev/null ./calc.at:1398: cat stderr -input: - | (#) + (#) = 2222 -stderr: -./calc.at:1397: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (- *) + (1 2) = 1 -./calc.at:1401: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1400: cat stderr stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: - | (* *) + (*) + (*) -./calc.at:1400: cat stderr -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90272,61 +90114,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1400: $PREPARSER ./calc input -stderr: input: -1.6: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (* *) + (*) + (*) -./calc.at:1401: $PREPARSER ./calc input +input: +./calc.at:1398: $PREPARSER ./calc input stderr: -./calc.at:1397: cat stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: +./calc.at:1397: cat stderr + | + | +1 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1400: $PREPARSER ./calc input ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -input: +./calc.at:1395: cat stderr +./calc.at:1397: $PREPARSER ./calc /dev/null stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | (1 + #) = 1111 stderr: -./calc.at:1397: $PREPARSER ./calc input +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: cat stderr 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' stderr: -./calc.at:1401: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1400: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: - | 1 + 2 * 3 + !+ ++ + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: ./calc.at:1401: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90336,36 +90159,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: cat stderr -stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: -input: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1401: $PREPARSER ./calc input -input: -./calc.at:1400: $PREPARSER ./calc input - | (# + 1) = 1111 -stderr: -./calc.at:1397: $PREPARSER ./calc input + | (* *) + (*) + (*) +1.1: syntax error +./calc.at:1395: $PREPARSER ./calc input stderr: stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1398: cat stderr stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 stdout: -1.2: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1402: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -90378,15 +90200,30 @@ )' calc.c calc.h input: -stderr: -1.2: syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 + !- ++ +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 + !+ ++ ./calc.at:1398: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: cat stderr stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90396,9 +90233,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1401: cat stderr +./calc.at:1397: cat stderr stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -90413,180 +90249,68 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1400: cat stderr ./calc.at:1402: $PREPARSER ./calc input input: -./calc.at:1397: cat stderr -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1401: $PREPARSER ./calc input stderr: -stderr: -./calc.at:1398: cat stderr -input: -1.14: memory exhausted +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 - | 1 2 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1402: $PREPARSER ./calc input -input: -stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1398: $PREPARSER ./calc input stderr: input: - | (1 + # + 1) = 1111 -1.6: syntax error: invalid character: '#' -1.3: syntax error, unexpected number -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1398: $PREPARSER ./calc input ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1397: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -1.6: syntax error: invalid character: '#' +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.14: memory exhausted ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: cat stderr +stdout: stderr: -1.14: memory exhausted -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1401: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: cat stderr stderr: -1.11-17: error: null divisor -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1402: cat stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1403: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (#) + (#) = 2222 -stderr: -./calc.at:1401: $PREPARSER ./calc input -1.11-17: error: null divisor - | (1 + 1) / (1 - 1) -./calc.at:1398: cat stderr -stderr: -./calc.at:1397: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' input: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1400: cat stderr - | 1//2 -1.11-17: error: null divisor -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ + | 1 2 ./calc.at:1402: $PREPARSER ./calc input stderr: +./calc.at:1395: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: +./calc.at:1400: cat stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.3: syntax error, unexpected number ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -1.11-17: error: null divisor -./calc.at:1398: $PREPARSER ./calc input -510. calc.at:1400: ok stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1401: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -stderr: -./calc.at:1401: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' - -./calc.at:1402: cat stderr -input: -./calc.at:1401: cat stderr - | error -./calc.at:1402: $PREPARSER ./calc input ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90597,25 +90321,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -508. 
calc.at:1397: ok -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -./calc.at:1401: $PREPARSER ./calc input -stderr: -./calc.at:1398: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1403: $PREPARSER ./calc input +1.3: syntax error, unexpected number +./calc.at:1401: cat stderr input: - | (1 + #) = 1111 -./calc.at:1398: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90625,41 +90349,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1398: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1402: cat stderr -stderr: -./calc.at:1401: cat stderr -1.6: syntax error: invalid character: '#' -input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: $PREPARSER ./calc input input: - | 1 = 2 = 3 -./calc.at:1402: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 + 2 * 3 + !- ++ stderr: -1.7: syntax error, unexpected '=' stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1398: cat stderr +input: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90670,194 +90375,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1402: cat stderr input: -./calc.at:1398: $PREPARSER ./calc input -stderr: - | (1 + 1) / (1 - 1) + | (- *) + (1 2) = 1 ./calc.at:1401: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor input: - | - | +1 -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -516. 
calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... -1.2: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1408: mv calc.y.tmp calc.y - -2.1: syntax error, unexpected '+' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1397: cat stderr + | 1 + 2 * 3 + !* ++ stderr: -2.1: syntax error, unexpected '+' -./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1401: cat stderr -./calc.at:1398: cat stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -511. calc.at:1401: input: - ok - | (1 + # + 1) = 1111 ./calc.at:1398: $PREPARSER ./calc input -./calc.at:1402: cat stderr -517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: mv calc.y.tmp calc.y - -./calc.at:1402: $PREPARSER ./calc /dev/null +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 stderr: -1.1: syntax error, unexpected end of file -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of file -./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: cat stderr -./calc.at:1398: cat stderr +1.3: syntax error, unexpected number input: - | (1 + 1) / (1 - 1) -./calc.at:1398: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr + | (!!) + (1 2) = 1 stderr: -1.11-17: error: null divisor -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1402: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1400: cat stderr stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -509. calc.at:1398: stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 - ok -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: cat stderr - input: - | (!!) + (1 2) = 1 -./calc.at:1402: $PREPARSER ./calc input -518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
-./calc.at:1411: mv calc.y.tmp calc.y - stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.11: syntax error, unexpected number +1.11: syntax error 1.1-16: error: 2222 != 1 -./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1402: cat stderr -input: - | (- *) + (1 2) = 1 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +1.3: syntax error, unexpected number +1.14: memory exhausted ./calc.at:1402: $PREPARSER ./calc input stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1413: mv calc.y.tmp calc.y - -./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1402: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90867,20 +90441,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1402: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90890,22 +90451,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1402: cat stderr +./calc.at:1401: cat stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1402: $PREPARSER ./calc input -stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | (!!) + (1 2) = 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1400: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !- ++ -./calc.at:1402: $PREPARSER ./calc input -stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1398: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90915,16 +90469,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1402: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1403: cat stderr stderr: -1.14: memory exhausted -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1401: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90934,17 +90486,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1402: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90955,18 +90502,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1402: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 stdout: -./calc.at:1403: "$PERL" -ne ' +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1405: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -90977,17 +90520,31 @@ || /\t/ )' calc.c calc.h -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +./calc.at:1397: cat stderr +./calc.at:1402: cat stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +input: +./calc.at:1400: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1//2 +./calc.at:1395: cat stderr +./calc.at:1403: $PREPARSER ./calc input +input: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +input: + | error +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1401: cat stderr +stderr: + | (- *) + (1 2) = 1 +./calc.at:1397: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91001,76 +90558,48 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1402: cat stderr -./calc.at:1403: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1405: $PREPARSER ./calc input ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: - | (# + 1) = 1111 -./calc.at:1402: $PREPARSER ./calc input input: +1.1: syntax error, unexpected invalid token stderr: - | 1 2 -1.2: syntax error: invalid character: '#' -./calc.at:1403: $PREPARSER ./calc input ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error, unexpected number -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ 1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: cat stderr -./calc.at:1403: cat stderr -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1402: $PREPARSER ./calc input - | 1//2 -./calc.at:1403: $PREPARSER ./calc input stderr: +input: stderr: -1.6: syntax error: invalid character: '#' 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +stderr: +memory exhausted +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | 1 + 2 * 3 + !+ ++ +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1401: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91080,24 +90609,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -./calc.at:1402: cat stderr -input: -input: - | error -./calc.at:1403: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1402: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected invalid token -stderr: -1.11-17: error: null divisor ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91108,6 +90620,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91118,19 +90633,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -./calc.at:1402: cat stderr -input: - | 1 = 2 = 3 -512. 
calc.at:1402: ./calc.at:1403: $PREPARSER ./calc input - ok -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: -1.7: syntax error, unexpected '=' - -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91140,17 +90647,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1402: cat stderr +./calc.at:1398: cat stderr ./calc.at:1403: cat stderr input: - | - | +1 -./calc.at:1403: $PREPARSER ./calc input -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1403: "$PERL" -pi -e 'use strict; +input: +./calc.at:1400: cat stderr + | 1 2 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91160,136 +90669,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -./calc.at:1403: $PREPARSER ./calc /dev/null + | 1 = 2 = 3 +./calc.at:1402: $PREPARSER ./calc input +input: stderr: -1.1: syntax error, unexpected end of input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of input -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1403: cat stderr -./calc.at:1414: mv calc.y.tmp calc.y - input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1398: $PREPARSER ./calc input +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.7: syntax error, unexpected '=' input: - | (!!) + (1 2) = 1 -./calc.at:1403: $PREPARSER ./calc input +./calc.at:1395: cat stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1400: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 -./calc.at:1403: $PREPARSER ./calc input stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr -input: | (* *) + (*) + (*) -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr -input: - | 1 + 2 * 3 + !+ ++ +1.7: syntax error, unexpected '=' +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1397: $PREPARSER ./calc input + | error ./calc.at:1403: $PREPARSER ./calc input stderr: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1403: $PREPARSER ./calc input stderr: +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.1: syntax error, unexpected invalid token + | (#) + (#) = 2222 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1401: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91299,16 +90731,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: -1.14: memory exhausted -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1405: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91318,37 +90746,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1403: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr input: - | (1 + #) = 1111 -./calc.at:1403: $PREPARSER ./calc input +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +./calc.at:1398: cat stderr +./calc.at:1400: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1401: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; +1.14: memory exhausted +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91358,15 +90776,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr stderr: -1.2: syntax error: invalid character: '#' +stdout: +./calc.at:1407: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91377,16 +90800,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; +input: +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1400: $PREPARSER ./calc input +input: + | + | +1 +1.14: memory exhausted + | 1//2 +./calc.at:1402: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91396,46 +90825,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: ./calc.at:1403: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1403: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1403: cat stderr -513. calc.at:1403: ok - -521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1416: mv calc.y.tmp calc.y - -./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: -stdout: -./calc.at:1405: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1401: cat stderr +./calc.at:1395: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91449,66 +90854,67 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1405: $PREPARSER ./calc input -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: - | 1 2 -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: $PREPARSER ./calc input stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1405: cat stderr -input: - | 1//2 -./calc.at:1405: $PREPARSER ./calc input stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1403: $PREPARSER ./calc input 1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +2.1: syntax error, unexpected '+' +input: +input: stderr: -stdout: -./calc.at:1407: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1405: cat stderr input: - | error + | 1 + 2 * 3 + !- ++ +./calc.at:1400: $PREPARSER ./calc input + | (1 + #) = 1111 +1.7: syntax error, unexpected '=' +./calc.at:1395: $PREPARSER ./calc input input: -./calc.at:1405: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1407: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1401: $PREPARSER ./calc input stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: $PREPARSER ./calc input +stderr: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: ./calc.at:1405: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -92345,10 +91751,15 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +./calc.at:1398: cat stderr ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +stderr: +./calc.at:1402: cat stderr Starting parse Entering state 0 Reading a token @@ -93185,15 +92596,36 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - | 1 = 2 = 3 -./calc.at:1405: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: input: +./calc.at:1402: $PREPARSER ./calc /dev/null +input: + | (1 + # + 1) = 1111 + | 1 + 2 * 3 + !- ++ | 1 2 -stderr: +./calc.at:1397: $PREPARSER ./calc input ./calc.at:1407: $PREPARSER ./calc input -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.1: syntax error, unexpected end of file +stderr: +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $PREPARSER ./calc input +stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -93209,10 +92641,26 @@ syntax error Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.1: 2) +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr +input: stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) stderr: +./calc.at:1403: cat stderr +stderr: +stderr: +1.1: syntax error, unexpected end of file Starting parse Entering state 0 Reading a token @@ -93228,10 +92676,78 @@ syntax error Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.1: 2) -./calc.at:1405: cat stderr +1.6: syntax error: invalid character: '#' + | error +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1395: cat stderr +stderr: +input: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: cat stderr input: + | 1 + 2 * 3 + !* ++ +./calc.at:1400: $PREPARSER ./calc input | | +1 +stderr: +./calc.at:1403: $PREPARSER ./calc input +input: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.14: memory exhausted +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +input: +stderr: +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' + | (1 + #) = 1111 +stderr: +./calc.at:1402: cat stderr +syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted +./calc.at:1401: $PREPARSER ./calc input +stderr: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1398: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93242,19 +92758,118 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1400: cat stderr +./calc.at:1402: $PREPARSER ./calc input + | 1 = 2 = 3 +input: ./calc.at:1405: $PREPARSER ./calc input +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) + | (1 + 1) / (1 - 1) +./calc.at:1398: $PREPARSER ./calc input +input: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) + | (#) + (#) = 2222 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1403: cat stderr +./calc.at:1397: cat stderr +./calc.at:1401: cat stderr +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor ./calc.at:1407: cat stderr -./calc.at:1405: cat stderr +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1403: $PREPARSER ./calc /dev/null +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +stderr: +stderr: +1.1: syntax error, unexpected end of input + | (# + 1) = 1111 +./calc.at:1401: $PREPARSER ./calc input +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +1.11-17: error: null divisor +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1397: $PREPARSER ./calc input input: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1405: cat stderr +1.1: syntax error, unexpected end of input +./calc.at:1395: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1//2 ./calc.at:1407: $PREPARSER ./calc input -./calc.at:1405: $PREPARSER ./calc /dev/null -stderr: stderr: Starting parse Entering state 0 @@ -93277,10 +92892,33 @@ Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1400: cat stderr +input: +1.2: syntax error: invalid character: '#' +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +input: stderr: +./calc.at:1398: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input stderr: +./calc.at:1402: cat stderr +input: Starting parse Entering state 0 Reading a token @@ -93301,8 +92939,36 @@ Error: popping token '/' (1.1: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.1: ) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1405: cat stderr +stderr: +1.14: memory exhausted +./calc.at:1403: cat stderr + | (1 + #) = 1111 +stderr: +./calc.at:1400: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1401: cat stderr +input: +1.6: syntax error: invalid character: '#' +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) + | (!!) + (1 2) = 1 +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1403: $PREPARSER ./calc input +509. 
calc.at:1398: ok +syntax error: invalid character: '#' +stderr: +input: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93313,28 +92979,98 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1405: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1400: cat stderr +stderr: +./calc.at:1405: $PREPARSER ./calc /dev/null +1.6: syntax error: invalid character: '#' ./calc.at:1407: cat stderr +./calc.at:1397: cat stderr + +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: $PREPARSER ./calc input input: +input: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1403: cat stderr | error ./calc.at:1407: $PREPARSER ./calc input -./calc.at:1405: cat stderr +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1402: cat stderr +stderr: +./calc.at:1401: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1395: cat stderr Starting parse Entering state 0 Reading a token @@ -93342,23 +93078,77 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: +input: +1.2: syntax error: invalid character: '#' | (!!) + (1 2) = 1 +./calc.at:1403: $PREPARSER ./calc input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +stderr: +input: +stderr: + | (1 + 1) / (1 - 1) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input +stderr: +./calc.at:1402: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Next token is token "invalid token" (1.1: ) syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +./calc.at:1405: cat stderr +1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1395: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +1.11-17: error: null divisor +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +error: null divisor +./calc.at:1400: cat stderr +1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: +1.11-17: error: null divisor +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1405: $PREPARSER ./calc input +error: null divisor +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93369,24 +93159,98 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1401: cat stderr +stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1400: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +./calc.at:1403: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1402: cat stderr +511. 
calc.at:1401: ok +1.6: syntax error: invalid character: '#' +./calc.at:1407: cat stderr ./calc.at:1405: cat stderr input: +./calc.at:1395: cat stderr +input: +input: + | (1 + #) = 1111 + | (* *) + (*) + (*) | (- *) + (1 2) = 1 -./calc.at:1407: cat stderr -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1397: $PREPARSER ./calc input input: - | 1 = 2 = 3 +input: +./calc.at:1400: cat stderr + | (!!) + (1 2) = 1 +stderr: stderr: +./calc.at:1405: $PREPARSER ./calc input +507. calc.at:1395: ok + | 1 = 2 = 3 ./calc.at:1407: $PREPARSER ./calc input -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + +stderr: stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -93416,8 +93280,26 @@ Error: popping token '=' (1.1: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.1: ) +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +stderr: +input: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: +1.6: syntax error: invalid character: '#' stderr: + | (1 + 1) / (1 - 1) +./calc.at:1400: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -93448,14 +93330,69 @@ Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.1: ) ./calc.at:1405: cat stderr +stderr: +517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... +1.11-17: error: null divisor +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: mv calc.y.tmp calc.y + +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + input: - | (* *) + (*) + (*) +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 ./calc.at:1405: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1403: cat stderr +./calc.at:1402: cat stderr +./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +stderr: +./calc.at:1397: cat stderr +input: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1402: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) +./calc.at:1403: $PREPARSER ./calc input +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93467,19 +93404,54 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1407: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1405: cat stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +510. calc.at:1400: stderr: + ok +input: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1407: cat stderr +stderr: + | (# + 1) = 1111 +./calc.at:1397: $PREPARSER ./calc input input: input: +input: + | (* *) + (*) + (*) +./calc.at:1405: $PREPARSER ./calc input | | +1 + | 1 + 2 * 3 + !- ++ +stderr: +1.2: syntax error: invalid character: '#' ./calc.at:1407: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1405: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: $PREPARSER ./calc input +stderr: + +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -93500,8 +93472,14 @@ Error: popping nterm input (1.1: ) Cleanup: discarding lookahead token '+' (1.1: ) stderr: +1.2: syntax error: invalid character: '#' ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1403: cat stderr +stderr: stderr: Starting parse Entering state 0 @@ -93522,13 +93500,17 @@ syntax error Error: popping nterm input (1.1: ) Cleanup: discarding lookahead token '+' (1.1: ) -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1405: $PREPARSER ./calc input -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -93538,34 +93520,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1405: cat stderr -./calc.at:1407: cat stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1407: $PREPARSER ./calc /dev/null -./calc.at:1405: $PREPARSER ./calc input -stderr: -stderr: -1.14: memory exhausted -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -1.14: memory exhausted + | 1 + 2 * 3 + !+ ++ +./calc.at:1403: $PREPARSER ./calc input ./calc.at:1405: cat stderr +stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +input: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93576,20 +93538,154 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !+ ++ input: - | (#) + (#) = 2222 +stderr: ./calc.at:1405: $PREPARSER ./calc input -./calc.at:1407: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1402: cat stderr +./calc.at:1397: $PREPARSER ./calc input +518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +./calc.at:1411: mv calc.y.tmp calc.y + stderr: +input: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1413: mv calc.y.tmp calc.y + + | 1 + 2 * 3 + !- ++ +stderr: +stderr: +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1403: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1407: cat stderr +./calc.at:1407: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +input: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input +stderr: +1.14: memory exhausted +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted +stderr: +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1405: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1403: cat stderr +520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1414: mv calc.y.tmp calc.y + +./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1397: cat stderr +input: +input: +stderr: + | 1 + 2 * 3 + !* ++ + | (#) + (#) = 2222 +1.14: memory exhausted +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: $PREPARSER ./calc input +input: +stderr: +stderr: +./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: + | (1 + 1) / (1 - 1) 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1407: $PREPARSER ./calc input +1.11-17: error: null divisor +stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.14: memory exhausted +./calc.at:1407: $PREPARSER ./calc input +stderr: +./calc.at:1405: cat stderr +1.11-17: error: null divisor stderr: Starting parse Entering state 0 @@ -93839,8 +93935,40 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1405: $PREPARSER ./calc input stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -94088,12 +94216,21 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: - | (1 + #) = 1111 -./calc.at:1405: $PREPARSER ./calc input +./calc.at:1403: cat stderr stderr: -1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1402: cat stderr ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: + | (#) + (#) = 2222 +input: +./calc.at:1397: cat stderr +stderr: + | (1 + #) = 1111 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -94104,20 +94241,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.6: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +508. calc.at:1397: ok +stderr: ./calc.at:1405: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: ./calc.at:1407: cat stderr +1.6: syntax error: invalid character: '#' input: + | (1 + #) = 1111 +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS + +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (# + 1) = 1111 +./calc.at:1403: cat stderr +stderr: | (!!) 
+ (1 2) = 1 +1.6: syntax error: invalid character: '#' ./calc.at:1407: $PREPARSER ./calc input -./calc.at:1405: $PREPARSER ./calc input -stderr: +./calc.at:1402: cat stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -94229,9 +94406,14 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) + | (1 + #) = 1111 +./calc.at:1403: $PREPARSER ./calc input ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr stderr: stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -94343,8 +94525,19 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +input: +stderr: +1.6: syntax error: invalid character: '#' + | (# + 1) = 1111 +./calc.at:1405: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +stderr: +./calc.at:1402: $PREPARSER ./calc input 1.2: syntax error: invalid character: '#' -./calc.at:1405: cat stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -94355,26 +94548,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +1.2: syntax error: invalid character: '#' +./calc.at:1405: cat stderr +./calc.at:1403: cat stderr +./calc.at:1407: cat stderr input: | (1 + # + 1) = 1111 ./calc.at:1405: $PREPARSER ./calc input +input: stderr: -stderr: + | (# + 1) = 1111 +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 1.6: syntax error: invalid character: '#' +input: +./calc.at:1403: $PREPARSER ./calc input ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1407: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1407: $PREPARSER ./calc input stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.6: syntax error: invalid character: '#' -input: - | (- *) + (1 2) = 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./calc.at:1405: cat stderr stderr: +521. 
calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... Starting parse Entering state 0 Reading a token @@ -94491,9 +94712,11 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1402: cat stderr ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1405: cat stderr Starting parse Entering state 0 Reading a token @@ -94610,924 +94833,21 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +1.2: syntax error: invalid character: '#' input: - | (1 + 1) / (1 - 1) -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11-17: error: null divisor -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1407: cat stderr -./calc.at:1405: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1407: $PREPARSER ./calc input -514. calc.at:1405: stderr: - ok -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading 
a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp 
(1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: cat stderr -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) + | (1 + # + 1) = 1111 +./calc.at:1402: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !- ++ -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' 
(1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -522. calc.at:1426: testing Calculator lalr1.cc %header ... -./calc.at:1426: mv calc.y.tmp calc.y +./calc.at:1416: mv calc.y.tmp calc.y -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1407: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -stderr: -stdout: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1407: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1407: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' + | (1 + 1) / (1 - 1) +./calc.at:1405: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.6: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95537,168 +94857,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1407: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +1.6: syntax error: invalid character: '#' ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -95710,155 +94872,9 @@ }eg ' expout || exit 77 stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1407: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +./calc.at:1403: cat stderr stderr: +1.11-17: error: null divisor stdout: ./calc.at:1408: "$PERL" -ne ' chomp; @@ -95871,7 +94887,7 @@ || /\t/ )' calc.c calc.h -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95882,6 +94898,11 @@ }eg ' expout || exit 77 input: +./calc.at:1405: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1402: cat stderr +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -95896,11 +94917,25 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1408: $PREPARSER ./calc input -./calc.at:1407: cat stderr +stderr: input: - | (1 + # + 1) = 1111 +1.6: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1402: $PREPARSER ./calc input +514. 
calc.at:1405: ok stderr: +stderr: +1.11-17: error: null divisor +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.6: syntax error: invalid character: '#' + | (* *) + (*) + (*) ./calc.at:1407: $PREPARSER ./calc input +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -96737,22 +95772,135 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: +1.11-17: error: null divisor ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 
+Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1413: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -97590,6 +96738,7 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) + Starting parse Entering state 0 Reading a token @@ -97597,32 +96746,15 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) +Next token is token '*' (1.1: ) +syntax error Shifting token error (1.1: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -97636,78 +96768,54 @@ -> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 28 +Entering state 30 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) + $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '\n' (1.1: ) 
-Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) +Next token is token '*' (1.1: ) +syntax error Shifting token error (1.1: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -97719,32 +96827,20 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 +Entering state 30 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 3333) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -97758,41 +96854,7 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: -stderr: -stdout: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -input: - | 1 2 -./calc.at:1408: $PREPARSER ./calc input -stderr: -input: -stdout: -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -97802,22 +96864,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1409: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1402: cat stderr +./calc.at:1403: cat stderr +./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -97834,6 +96885,10 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +512. calc.at:1402: ok +stderr: +input: Starting parse Entering state 0 Reading a token @@ -97845,1270 +96900,4810 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1403: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +./calc.at:1407: cat stderr +./calc.at:1408: cat stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1407: $PREPARSER ./calc input + | 1//2 +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' 
(1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +./calc.at:1403: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): - $1 = 
nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1407: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +513. 
calc.at:1403: ok +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1408: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp 
(1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) + +523. calc.at:1431: testing Calculator C++ ... +./calc.at:1431: mv calc.y.tmp calc.y + +522. calc.at:1426: testing Calculator lalr1.cc %header ... +./calc.at:1426: mv calc.y.tmp calc.y + +./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1407: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1408: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) 
-Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 +stderr: +./calc.at:1408: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) 
-Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +524. calc.at:1432: testing Calculator C++ %locations ... 
+./calc.at:1432: mv calc.y.tmp calc.y + +stderr: +./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1408: cat stderr +./calc.at:1407: cat stderr +input: + | + | +1 +./calc.at:1408: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) + | (#) + (#) = 2222 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) 
-Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> 
$$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +./calc.at:1408: $PREPARSER ./calc /dev/null +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) ./calc.at:1407: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1408: cat stderr + | (1 + #) = 1111 +./calc.at:1407: $PREPARSER ./calc input stderr: -stderr: +./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 12 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1408: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing 
stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' 
(1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 Reading a token -Next token is token "number" (5.3: 
1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' 
(5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 Reading a token -Next token is token "number" (7.9: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 
by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1408: cat stderr +input: +input: + | (!!) 
+ (1 2) = 1 + | (# + 1) = 1111 +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: +./calc.at:1407: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1408: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (* *) + (*) + (*) +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1407: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: cat stderr +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' 
(1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +515. 
calc.at:1407: ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1408: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: +525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... 
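The repeated `"$PERL" -pi -e '...' expout || exit 77` one-liners quoted above rewrite the expected-output file `expout` in place before it is compared against stderr: a diagnostic of the form `syntax error on token [X] (expected: [A] [B])` becomes `syntax error, unexpected X, expecting A or B`, and the expectation list is kept only when there are between two and four candidates (exit status 77 marks the test as skipped if the rewrite cannot run). The following is a minimal standalone sketch of that same substitution, applied to a single hypothetical expout line (the sample input is an illustration, not taken from this log):

use strict;
use warnings;

# Hypothetical expout line (illustration only; not from the log above).
my $line = "syntax error on token [number] (expected: ['-'] [')'])";

# Same substitution as the logged one-liners, applied to one string
# instead of editing expout in place with -pi.
$line =~ s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
          {
            my $unexp = $1;
            my @exps  = $2 =~ /\[(.*?)\]/g;
            ($#exps && $#exps < 4)
              ? "syntax error, unexpected $unexp, expecting @{[join(' or ', @exps)]}"
              : "syntax error, unexpected $unexp";
          }eg;

print "$line\n";   # syntax error, unexpected number, expecting '-' or ')'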
+./calc.at:1433: mv calc.y.tmp calc.y + + | (#) + (#) = 2222 +./calc.at:1408: $PREPARSER ./calc input +stderr: +./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +stderr: +stdout: +./calc.at:1414: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) 
+Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + 
$2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm 
exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): @@ -99528,10 +102123,8 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -stdout: -======== Testing with C++ standard flags: '' Starting parse Entering state 0 Reading a token @@ -99568,14 +102161,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -99650,20 +102243,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -99686,7 +102279,7 @@ Entering state 
10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -99750,14 +102343,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -99779,7 +102372,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -99822,7 +102415,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -99830,7 +102423,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -99850,7 +102443,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -99923,19 +102516,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -99957,7 +102550,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -100017,7 +102610,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -100036,7 +102629,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -100059,7 +102652,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -100122,7 +102715,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by 
rule 8 (line 100): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -100131,7 +102724,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -100139,7 +102732,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -100224,14 +102817,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -100295,7 +102888,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -100304,7 +102897,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -100324,7 +102917,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -100368,36 +102961,26 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -input: -./calc.at:1411: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - | 1 2 -./calc.at:1413: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1414: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -100414,6 +102997,73 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +stdout: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1413: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1416: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1416: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -100450,14 +103100,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -100532,20 +103182,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -100568,7 +103218,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -100632,14 +103282,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -100661,7 +103311,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -100704,7 +103354,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -100712,7 +103362,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -100732,7 +103382,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -100805,19 +103455,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) 
-Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -100839,7 +103489,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -100899,7 +103549,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -100918,7 +103568,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -100941,7 +103591,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -101004,7 +103654,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -101013,7 +103663,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -101021,7 +103671,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -101106,14 +103756,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -101177,7 +103827,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -101186,7 +103836,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -101206,7 +103856,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -101250,368 +103900,11 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1416: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1 2 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at 
end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1408: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -Starting parse -Entering state 0 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -102445,29 +104738,7 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: cat stderr stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -103304,25 +105575,11 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1 2 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1408: cat stderr - | 1//2 +./calc.at:1416: $PREPARSER ./calc input stderr: -./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -103334,57 +105591,833 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -input: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: cat stderr - | 1//2 -./calc.at:1409: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -input: - | error -Starting parse -Entering state 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) 
+Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp 
(4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) 
+Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp 
(9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting 
token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) @@ -103395,18 +106428,83 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +input: + | 1 2 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: $PREPARSER ./calc input +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1416: cat stderr +516. calc.at:1408: ok +input: + | 1//2 +./calc.at:1416: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1414: $PREPARSER ./calc input stderr: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -103428,8 +106526,7 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -515. 
calc.at:1407: ok -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -103439,7 +106536,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -103460,46 +106556,55 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1413: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1411: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) -input: | 1//2 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: cat stderr +./calc.at:1413: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -103521,9 +106626,17 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1413: cat stderr -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -103533,7 +106646,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr stderr: +./calc.at:1414: cat stderr input: Starting parse Entering state 0 @@ -103555,13 +106671,31 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -input: | error -./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | error -./calc.at:1408: cat stderr -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1413: cat stderr +stderr: stderr: Starting parse Entering state 0 @@ -103575,7 +106709,31 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; +stderr: +stdout: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +input: + | error +stderr: +./calc.at:1413: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -103585,11 +106743,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -103597,9 +106750,18 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -./calc.at:1411: cat stderr -stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr stderr: Starting parse Entering state 0 @@ -103607,7 +106769,12 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -stdout: +input: + | 1 = 2 = 3 +./calc.at:1414: cat stderr +./calc.at:1416: $PREPARSER ./calc input +input: +stderr: Starting parse Entering state 0 Reading a token @@ -103637,11 +106804,19 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -103652,6 +106827,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1411: $PREPARSER ./calc input +input: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1413: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -103681,52 +106863,7 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) - | error -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: $PREPARSER ./calc input -stderr: -./calc.at:1413: cat stderr -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -input: -./calc.at:1408: cat stderr - | 1 = 2 = 3 -./calc.at:1413: $PREPARSER ./calc input -input: stderr: - | 1 = 2 = 3 Starting parse Entering state 0 Reading a token @@ -103756,23 +106893,11 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1413: $PREPARSER ./calc input stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -103802,7 +106927,7 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -103815,9 +106940,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -103825,24 +106950,59 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 +Entering state 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1411: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -103850,67 +107010,523 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -523. calc.at:1431: testing Calculator C++ ... 
-stderr: -./calc.at:1431: mv calc.y.tmp calc.y - -input: -stderr: -Starting parse -Entering state 0 +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token 
+Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -Starting parse -Entering state 0 +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = 
nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) 
+Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 
+Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting tok./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -103920,9 +107536,245 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1411: $PREPARSER ./calc input -stderr: +en ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line 
(11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token 
'^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) Starting parse Entering state 0 Reading a token @@ -103952,9 +107804,847 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1413: cat stderr ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + 
$1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering 
state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '('./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 
+Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp 
(7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) 
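The traces in this hunk step through the calc test input one expression at a time (1 + 2 * 3 = 7, 1 - 2 - 3 = -4, 2^2^3 = 256, (2^2)^3 = 64, ...), reducing by rule 11 for unary minus and rule 12 for '^'. As a quick cross-check of the arithmetic those reductions report, here is a minimal sketch in Python, separate from the logged output; it assumes nothing about the test harness and relies only on the fact that Python's ** and unary minus happen to follow the same precedence and associativity as the grammar's '^' for these particular expressions:

  # Sketch only: re-checks the arithmetic identities evaluated in the traces.
  # Python's `**` is right-associative and binds tighter than unary minus,
  # which matches the behaviour shown for '^' (rules 11 and 12) above.
  checks = [
      ("1 + 2 * 3",    7),   # '*' binds tighter than '+'
      ("1 + 2 * -3",  -5),   # unary minus inside a product
      ("- - -1",      -1),   # stacked unary minus
      ("1 - 2 - 3",   -4),   # '-' is left-associative
      ("1 - (2 - 3)",  2),
      ("2**2**3",    256),   # exponentiation is right-associative
      ("(2**2)**3",   64),
      ("-1**2",       -1),   # exponentiation binds tighter than unary minus
      ("(-1)**2",      1),
  ]
  for expr, expected in checks:
      assert eval(expr) == expected, (expr, expected)
  print("all identities from the traces hold")

The other inputs exercised in this hunk (1 2, 1//2, 1 = 2 = 3, an empty line followed by +1, /dev/null) are the deliberate error cases; their traces end in "syntax error" diagnostics rather than a computed value, so there is nothing to check arithmetically.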
+Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting 
token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp 
(13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -103964,7 +108654,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 2 +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1414: cat stderr stderr: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -103994,27 +108704,27 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr +./calc.at:1416: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) input: | | +1 -stdout: -./calc.at:1413: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1408: cat stderr -stderr: -input: +./calc.at:1414: $PREPARSER ./calc input ./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -104025,6 +108735,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +stderr: +./calc.at:1411: cat stderr Starting parse Entering state 0 Reading a token @@ -104046,18 +108759,19 @@ Cleanup: discarding lookahead token '+' (2.1: ) | | +1 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc /dev/null -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) Starting parse Entering state 0 Reading a token @@ -104077,11 +108791,9 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -104101,15 +108813,9 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) + | 1//2 +./calc.at:1411: $PREPARSER ./calc input stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -input: Starting parse Entering state 0 Reading a token @@ -104129,10 +108835,33 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1413: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | | +1 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104142,11 +108871,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1413: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +Starting parse +Entering state 0 +Reading a token Next token is token '\n' (1.1-2.0: ) Shifting token '\n' (1.1-2.0: ) Entering state 3 @@ -104163,19 +108913,8 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
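The "$PERL" -pi -e '...' expout || exit 77 commands that recur throughout these hunks (for calc.at:1408 through calc.at:1416) all apply the same substitution: any `syntax error on token [X] (expected: [A] [B] ...)` line in expout is rewritten into the `syntax error, unexpected X, expecting A or B ...` form, and the "expecting" list is kept only when there are two to four candidates; presumably this lets one set of expected outputs match either error-message style. Purely as an illustration of what that one-liner does, here is a rough Python equivalent applied to a made-up sample line (the sample does not come from the log):

  import re

  # Illustration only: approximate Python rendering of the Perl substitution
  # run on `expout` above; the sample input line below is hypothetical.
  def normalize(line):
      def repl(m):
          unexp = m.group(1)
          exps = re.findall(r"\[(.*?)\]", m.group(2))
          if 1 < len(exps) < 5:          # Perl: $#exps && $#exps < 4
              return "syntax error, unexpected %s, expecting %s" % (
                  unexp, " or ".join(exps))
          return "syntax error, unexpected %s" % unexp
      return re.sub(r"syntax error on token \[(.*?)\] \(expected: (.*)\)",
                    repl, line)

  print(normalize("syntax error on token ['/'] (expected: [number] ['-'])"))
  # prints: syntax error, unexpected '/', expecting number or '-'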
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1414: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104185,8 +108924,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: $PREPARSER ./calc /dev/null stderr: -./calc.at:1413: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -104206,19 +108946,41 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1408: cat stderr -./calc.at:1409: cat stderr stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +./calc.at:1416: cat stderr +stderr: +./calc.at:1409: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc /dev/null -input: -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1411: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $PREPARSER ./calc /dev/null +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104228,293 +108990,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1408: $PREPARSER ./calc input stderr: +input: Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1411: cat stderr + | error +./calc.at:1411: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1413: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1413: $PREPARSER ./calc /dev/null +526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... 
+Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1411: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1434: mv calc.y.tmp calc.y + stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -439. types.at:139: stderr: - ok +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104524,7 +109067,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104537,238 +109081,77 @@ Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Reading a token -Next token is token '*' (1.39: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104776,337 +109159,1703 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1413: cat stderr -./calc.at:1409: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1409: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1408: cat stderr -./calc.at:1411: cat stderr -stderr: -./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) 
+Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering 
state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Reducing 
stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = 
nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token 
+Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp 
(12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next 
token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1416: cat stderr +stderr: +stderr: +./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 
107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: 
-1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is 
token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by 
rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = 
nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' 
(13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +input: +./calc.at:1411: cat stderr + | 1 2 +input: +input: +./calc.at:1409: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +./calc.at:1411: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stderr: +./calc.at:1413: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Reading a token @@ -105119,7 +110868,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -105138,7 +110887,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -105157,7 +110906,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -105165,7 +110914,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -105197,7 +110946,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -105205,7 +110954,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -105240,7 +110989,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -105264,7 +111013,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -105272,7 +111021,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 
99): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -105316,128 +111065,35 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (!!) + (1 2) = 1 -./calc.at:1408: $PREPARSER ./calc input -input: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 
-Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) Starting parse Entering state 0 Reading a token @@ -105685,10 +111341,58 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +Starting parse +Entering state 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -105700,7 +111404,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -105736,7 +111440,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -105755,7 +111459,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -105774,7 +111478,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.6: ) $2 
= token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -105782,7 +111486,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -105814,7 +111518,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -105822,7 +111526,7 @@ Entering state 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -105857,7 +111561,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -105881,7 +111585,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -105889,7 +111593,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -105933,6 +111637,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: Starting parse Entering state 0 Reading a token @@ -106180,8 +111885,52 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1411: cat stderr Starting parse Entering state 0 Reading a token @@ -106189,98 +111938,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token ')' (1.12: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -106293,8 +112178,43 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: cat stderr +./calc.at:1414: cat stderr +input: + | 1//2 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: $PREPARSER ./calc input stderr: +input: +input: stderr: + | + | +1 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -106542,70 +112462,31 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1413: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1408: cat stderr -input: -stdout: -input: -./calc.at:1411: cat stderr -./types.at:139: $PREPARSER ./test - | (!!) + (1 2) = 1 -./calc.at:1409: $PREPARSER ./calc input | (!!) + (1 2) = 1 -input: -./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input stderr: stderr: - | (- *) + (1 2) = 1 -./calc.at:1408: $PREPARSER ./calc input -input: - | (!!) + (1 2) = 1 +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -106620,7 +112501,7 @@ Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) Entering state 16 -Reducing stack 0 by rule 16 (line 128): +Reducing stack 0 by rule 16 (line 116): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Shifting token error (1.2-3: ) @@ -106629,7 +112510,7 @@ Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -106665,7 +112546,7 @@ Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -106673,7 +112554,7 @@ Entering state 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -106717,14 +112598,48 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: stderr: -======== Testing with C++ standard flags: '' -stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -106739,7 +112654,7 @@ Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) Entering state 16 -Reducing stack 0 by rule 16 (line 128): +Reducing stack 0 by rule 16 (line 116): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Shifting token error (1.2-3: ) @@ -106748,7 +112663,7 @@ Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -106784,7 +112699,7 @@ Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -106792,7 +112707,7 @@ Entering state 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -106836,6 +112751,18 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -106947,7 +112874,18 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -106955,103 +112893,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ 
= nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -107064,7 +112997,85 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | error +./calc.at:1411: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: cat stderr +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1416: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) + | (!!) + (1 2) = 1 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: cat stderr +input: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -107176,12 +113187,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -524. 
calc.at:1432: testing Calculator C++ %locations ... -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1432: mv calc.y.tmp calc.y - +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr stderr: Starting parse Entering state 0 @@ -107198,7 +113212,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 127): +Reducing stack 0 by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -107211,7 +113225,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -107247,7 +113261,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -107255,7 +113269,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -107299,118 +113313,17 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -107522,58 +113435,14 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -./calc.at:1408: cat stderr -./calc.at:1413: cat stderr -input: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (- *) + (1 2) = 1 -./calc.at:1411: cat stderr -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input input: + | 1 = 2 = 3 stderr: -input: - | (* *) + (*) + (*) +./calc.at:1411: cat stderr +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -107589,7 +113458,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 127): +Reducing stack 0 by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -107602,7 +113471,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -107638,7 +113507,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -107646,7 +113515,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -107690,15 +113559,51 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1408: $PREPARSER ./calc input - | (- *) + (1 2) = 1 - | (- *) + (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: $PREPARSER ./calc input -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -107714,7 +113619,7 @@ 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 127): +Reducing stack 0 by rule 15 (line 115): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Shifting token error (1.2-4: ) @@ -107727,7 +113632,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -107763,7 +113668,7 @@ Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -107771,7 +113676,7 @@ Entering state 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -107815,6 +113720,63 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +input: + | (- *) + (1 2) = 1 +./calc.at:1416: cat stderr +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -107822,102 +113784,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token ')' (1.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -107930,9 +114024,11 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: stderr: +./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -108049,6 +114145,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (* *) + (*) + (*) +./calc.at:1416: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -108165,9 +114264,258 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +input: + | + | +1 +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -108190,7 +114538,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -108217,7 +114565,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -108225,7 +114573,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -108251,7 +114599,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -108259,7 +114607,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -108283,9 +114631,21 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o calc calc.cc $LIBS +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -108295,6 +114655,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -108411,147 +114772,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm 
exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -440. types.at:139: ok -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: cat stderr +./calc.at:1414: cat stderr input: | (* *) + (*) + (*) +./calc.at:1414: $PREPARSER ./calc input ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -108562,13 +114786,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: $PREPARSER ./calc input -input: -./calc.at:1411: cat stderr stderr: -./calc.at:1413: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -108591,7 +114809,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -108618,7 +114836,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -108626,7 +114844,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -108652,7 +114870,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -108660,7 +114878,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -108684,82 +114902,12 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr stderr: Starting parse Entering state 
0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (* *) + (*) + (*) -input: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -108779,7 +114927,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -108806,7 +114954,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -108814,7 +114962,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -108840,7 +114988,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -108848,7 +114996,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -108872,7 +115020,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: $PREPARSER ./calc input +input: | (* *) + (*) + (*) ./calc.at:1413: $PREPARSER ./calc input stderr: @@ -108991,8 +115139,8 @@ Entering 
state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -109002,72 +115150,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1414: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: cat stderr stderr: Starting parse Entering state 0 @@ -109184,8 +115269,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: cat stderr -stderr: Starting parse Entering state 0 Reading a token @@ -109301,115 +115384,30 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1408: $PREPARSER ./calc input -stderr: -stderr: +input: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) 
+ (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1414: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -109417,11 +115415,10 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: Starting parse Entering state 0 @@ -109459,14 +115456,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -109480,31 +115477,128 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -109515,6 +115609,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1416: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: Starting parse Entering state 0 @@ -109552,14 +115668,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -109573,20 +115689,133 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: | 1 + 2 * 3 + !+ ++ -./calc.at:1409: $PREPARSER ./calc input -./types.at:139: ./check -./calc.at:1411: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1416: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1413: cat stderr stderr: Starting parse Entering state 0 @@ -109624,14 +115853,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -109648,15 +115877,22 @@ Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) Entering state 14 -Reducing stack 0 by rule 17 (line 129): +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -109666,7 +115902,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr stderr: Starting parse Entering state 0 @@ -109704,14 +115939,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -109725,25 +115960,23 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !+ ++ -./calc.at:1411: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: cat stderr -input: +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +./calc.at:1409: cat stderr +stderr: stderr: - | 1 + 2 * 3 + !+ ++ stderr: Starting parse Entering state 0 @@ -109781,14 +116014,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -109805,12 +116038,11 @@ Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 13 -Reducing stack 0 by rule 18 (line 130): +Reducing stack 0 by rule 18 (line 118): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -109876,9 +116108,7 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: $PREPARSER ./calc input -stderr: +./calc.at:1409: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -109944,7 +116174,32 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (- *) + (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ stderr: +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -109981,14 +116236,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -110002,16 +116257,16 @@ Shifting token '!' 
(1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -110069,99 +116324,261 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) input: - | 1 + 2 * 3 + !- ++ -./calc.at:1411: $PREPARSER ./calc input -stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !* ++ Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1408: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' 
(1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | 1 + 2 * 3 + !- ++ +./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -110227,10 +116644,7 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: $PREPARSER ./calc input stderr: -./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -110267,14 +116681,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -110288,16 +116702,48 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !* ++ +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -110356,14 +116802,19 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1411: cat stderr Starting parse Entering state 0 Reading a token @@ -110429,15 +116880,12 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1416: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... -./calc.at:1433: mv calc.y.tmp calc.y - +./calc.at:1409: $PREPARSER ./calc input +input: Starting parse Entering state 0 Reading a token @@ -110474,14 +116922,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -110498,13 +116946,18 @@ Next token is token '*' (1.14: ) Shifting token '*' (1.14: ) Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Reducing stack 0 by rule 19 (line 119): $1 = token '!' (1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +./calc.at:1411: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110515,153 +116968,374 @@ }eg ' expout || exit 77 stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 
Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 31 Reading a token -Next token is token '+' (1.11: ) +Next token is token '*' (1.39: ) Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1413: cat stderr stderr: -./calc.at:1411: cat stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110671,6 +117345,370 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -110707,14 +117745,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -110731,13 +117769,17 @@ Next token is token '*' (1.14: ) Shifting token '*' (1.14: ) Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Reducing stack 0 by rule 19 (line 119): $1 = token '!' 
(1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: cat stderr +./calc.at:1414: cat stderr +input: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1413: $PREPARSER ./calc input ./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -110748,20 +117790,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1409: cat stderr -input: -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1413: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1411: $PREPARSER ./calc input -input: stderr: - | (#) + (#) = 2222 - | (#) + (#) = 2222 Starting parse Entering state 0 Reading a token @@ -110828,8 +117857,6 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -110897,10 +117924,22 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1409: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1411: cat stderr stderr: Starting parse Entering state 0 @@ -110968,6 +118007,18 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: Starting parse Entering state 0 Reading a token @@ -110987,7 +118038,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -111014,7 +118065,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -111022,7 +118073,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -111065,7 +118116,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -111073,76 +118127,90 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' (1.3: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = 
nterm exp (1.13-16: 2222) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Next token is token '\n' (1.17-2.0: ) @@ -111163,75 +118231,134 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1411: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -111251,7 +118378,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -111278,7 +118405,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -111286,7 +118413,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -111329,6 +118456,78 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: + | (#) + (#) = 2222 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1413: cat stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -111349,7 +118548,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -111376,7 +118575,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -111384,7 +118583,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -111427,29 +118626,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./types.at:139: $PREPARSER ./test ./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -111460,9 +118636,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1411: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; +input: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp 
(1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | (#) + (#) = 2222 +./calc.at:1413: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111472,27 +118717,80 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1409: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 ./calc.at:1411: $PREPARSER ./calc input -input: -./calc.at:1408: cat stderr -input: -======== Testing with C++ standard flags: '' - | (#) + (#) = 2222 -./calc.at:1413: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1409: cat stderr stderr: -stdout: stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -111587,8 +118885,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check Starting parse Entering state 0 Reading a token @@ -111686,78 +118982,196 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1414: cat stderr input: - | (1 + #) = 1111 -./calc.at:1408: $PREPARSER ./calc input + | (- *) + (1 2) = 1 ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1409: $PREPARSER ./calc input stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | (1 + #) = 1111 +stderr: +stderr: +./calc.at:1414: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 
1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -111771,7 +119185,6 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token @@ -111869,7 +119282,19 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: cat stderr +stderr: stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -111903,7 +119328,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -111947,8 +119372,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -111956,84 +119380,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token 
'=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -112046,8 +119489,19 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: +./calc.at:1411: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -112081,7 +119535,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -112125,6 +119579,22 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + #) = 1111 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1411: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -112158,7 +119628,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -112202,28 +119672,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1413: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112233,29 +119683,147 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1408: cat stderr -input: +./calc.at:1409: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1414: cat stderr | (1 + #) = 1111 ./calc.at:1413: $PREPARSER ./calc input -input: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1411: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: Starting parse Entering state 0 @@ -112334,6 +119902,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: Starting parse Entering state 0 Reading a token @@ -112411,8 +119980,35 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1409: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -112421,56 +120017,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -112483,9 +120125,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -112563,10 +120203,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112592,7 +120228,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -112636,7 +120272,125 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr +./calc.at:1411: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -112663,7 +120417,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -112707,7 +120461,42 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -112715,26 +120504,18 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Error: discarding token '+' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -112785,17 +120566,117 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1409: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (# + 1) = 1111 +stderr: +./calc.at:1413: $PREPARSER ./calc input stderr: + | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 Reading a token @@ -112821,7 +120702,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -112865,48 +120746,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: cat stderr -./calc.at:1411: cat stderr -./calc.at:1413: cat stderr -input: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1411: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./calc.at:1409: cat stderr +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112914,70 +120754,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token ')' (1.11: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 
2222) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -112991,10 +120845,76 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: + | (1 + # + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -113064,8 +120984,17 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113105,7 +121034,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -113149,7 +121078,89 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: +./calc.at:1416: cat stderr +stderr: +./calc.at:1411: cat stderr + | 1 + 2 * 3 + !- ++ stderr: Starting parse Entering state 0 @@ -113220,9 +121231,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 ./calc.at:1409: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -113230,56 +121239,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 
(line 78): @@ -113292,8 +121315,97 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1416: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113333,7 +121445,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -113377,29 +121489,158 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: + | (1 + #) = 1111 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1414: cat stderr +./calc.at:1413: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113439,7 +121680,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -113483,6 +121724,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input: Starting parse Entering state 0 Reading a token @@ -113490,18 +121733,26 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -113552,14 +121803,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: cat stderr -stderr: -./calc.at:1411: cat stderr -stdout: input: - | (1 + 1) / (1 - 1) -./types.at:139: ./check -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113569,8 +121814,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1414: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1409: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -113580,10 +121829,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: stderr: -./calc.at:1413: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -113604,6 +121860,89 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token Next token is token "number" (1.6: 1) Shifting token "number" (1.6: 1) Entering state 1 @@ -113613,7 +121952,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -113622,7 +121961,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -113658,7 +121997,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -113667,7 +122006,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -113675,7 +122014,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -113700,21 +122039,19 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -stdout: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +./calc.at:1416: cat stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1409: $PREPARSER ./calc input +input: +stderr: +input: + | (1 + 1) / (1 - 1) +./calc.at:1416: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -113744,7 +122081,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -113753,7 +122090,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -113789,7 +122126,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -113798,7 +122135,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -113806,7 +122143,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -113831,10 +122168,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -113918,7 +122251,76 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1411: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' 
(1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -113939,58 +122341,90 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: 
) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114003,20 +122437,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Reading a token @@ -114024,70 +122444,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.11-14: 
1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114100,93 +122506,152 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1414: $PREPARSER ./calc input -input: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: 
) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114199,33 +122664,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: cat stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -516. calc.at:1408: ok +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1413: cat stderr stderr: +./calc.at:1414: cat stderr Starting parse Entering state 0 Reading a token @@ -114254,7 +122696,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -114263,7 +122705,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -114299,7 +122741,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -114308,7 +122750,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -114316,7 +122758,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -114341,2039 +122783,4839 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +./calc.at:1409: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm 
line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: cat stderr +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +520. calc.at:1414: | (1 + # + 1) = 1111 + ok +./calc.at:1411: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 
+Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +521. 
calc.at:1416: ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr + +input: + | (1 + #) = 1111 +./calc.at:1409: $PREPARSER ./calc input +519. calc.at:1413: ok +./calc.at:1411: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) +./calc.at:1411: $PREPARSER ./calc input +stderr: + +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr +input: +stderr: + | (# + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1435: mv calc.y.tmp calc.y + +528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... 
+./calc.at:1437: mv calc.y.tmp calc.y + +stderr: +./calc.at:1411: cat stderr +./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +518. calc.at:1411: ok +./calc.at:1409: cat stderr +input: + + | (1 + # + 1) = 1111 +./calc.at:1409: $PREPARSER ./calc input +stderr: +529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+./calc.at:1438: mv calc.y.tmp calc.y + +./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./calc.at:1433: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... 
+./calc.at:1440: mv calc.y.tmp calc.y + +./calc.at:1409: cat stderr +./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1433: $PREPARSER ./calc input +stdout: +stderr: +./calc.at:1432: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +stderr: + | (1 + 1) / (1 - 1) +stdout: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1431: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp 
(1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering 
state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 2 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1432: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1431: $PREPARSER ./calc input +stderr: +./calc.at:1409: cat stderr +stderr: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +stderr: +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +517. calc.at:1409: ok +stderr: +1.3: syntax error +input: + +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1432: $PREPARSER ./calc input + | 1 2 +./calc.at:1431: $PREPARSER ./calc input +stderr: +./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +syntax error +1.3: syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +stderr: +1.3: syntax error +input: + | 1//2 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.3: syntax error +syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr +stderr: +1.3: syntax error +input: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1431: cat stderr +./calc.at:1433: cat stderr +531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... +./calc.at:1441: mv calc.y.tmp calc.y + +input: +./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1433: $PREPARSER ./calc input +input: + | 1//2 +stderr: +./calc.at:1431: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1432: cat stderr +stderr: +syntax error +input: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1432: $PREPARSER ./calc input +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +1.1: syntax error + | 1 = 2 = 3 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1431: cat stderr +stderr: +1.7: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.7: syntax error +input: +./calc.at:1432: cat stderr + | error +./calc.at:1431: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +stderr: +1.7: syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +syntax error +stderr: +1.7: syntax error +./calc.at:1433: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1433: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: cat stderr +input: +./calc.at:1432: cat stderr +stderr: + | 1 = 2 = 3 +./calc.at:1431: $PREPARSER ./calc input +2.1: syntax error +stderr: +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +stderr: +./calc.at:1433: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +./calc.at:1431: cat stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +input: + | + | +1 +./calc.at:1431: $PREPARSER ./calc input +2.1: syntax error +stderr: +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +./calc.at:1432: cat stderr +./calc.at:1432: $PREPARSER ./calc /dev/null +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1431: cat stderr +./calc.at:1431: $PREPARSER ./calc /dev/null +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1432: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1433: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1431: cat stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1431: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1432: cat stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +input: + | (!!) + (1 2) = 1 +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1431: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: cat stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input +input: +stderr: +syntax error +error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input +stderr: +stderr: +syntax error +error: 2222 != 1 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1431: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (* *) + (*) + (*) +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1433: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +syntax error +syntax error +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +stderr: +stdout: +./calc.at:1434: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1434: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1433: $PREPARSER ./calc input +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 2 +./calc.at:1434: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (#) + (#) = 2222 +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1431: cat stderr +input: + | (* *) + (*) + (*) +input: +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1434: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1431: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1//2 +./calc.at:1434: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1431: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +stderr: +stdout: +./calc.at:1437: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1437: $PREPARSER ./calc input +input: + | (1 + #) = 1111 +./calc.at:1433: $PREPARSER ./calc input +stderr: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +input: + | (#) + (#) = 2222 +./calc.at:1431: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected number +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error, unexpected number +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr +./calc.at:1437: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 +./calc.at:1433: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: + | (1 + #) = 1111 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error: invalid character: '#' +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1437: cat stderr +./calc.at:1433: cat stderr +input: + | error +./calc.at:1437: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.1: syntax error, unexpected invalid token +./calc.at:1431: cat stderr +stderr: +stdout: +./calc.at:1440: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +syntax error: invalid character: '#' +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1433: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) -> $$ = nterm exp (1.1-13: 7) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 
(line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) -Entering state 21 +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) -Entering state 22 +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) -Entering state 31 +Entering state 30 +Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) -> $$ = nterm exp (2.1-10: -5) Entering state 8 +Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 
6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) -> $$ = nterm exp (2.1-15: -5) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 +Stack now 0 6 2 10 Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' 
(4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) -> $$ = nterm exp (4.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 +Stack now 0 6 4 Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 +Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '('input: - (5.1: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) -> $$ = nterm exp (5.1-6: 1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is 
token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) -> $$ = nterm exp (5.1-10: 1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 +Stack now 0 6 2 Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 +Stack now 0 6 2 2 Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 +Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = 
nterm exp (7.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) -> $$ = nterm exp (7.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) -> $$ = nterm exp (9.1-5: -1) Entering state 8 +Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) -> $$ = nterm exp (9.1-9: -4) Entering state 8 +Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' 
(9.13: ) Shifting token '-' (9.13: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) -> $$ = nterm exp (9.1-14: -4) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 +Stack now 0 6 8 19 4 12 Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) -> $$ = nterm exp (10.6-10: -1) Entering state 12 +Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting 
token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) -> $$ = nterm exp (10.1-11: 2) Entering state 8 +Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) -> $$ = nterm exp (10.1-15: 2) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) 
-Entering state 24 +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) -> $$ = nterm exp (12.1-5: 256) Entering state 8 +Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) -> $$ = nterm exp (12.1-11: 256) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 +Stack now 0 6 4 12 Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 4 
12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) -> $$ = nterm exp (13.2-4: 4) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) -> $$ = nterm exp (13.1-7: 64) Entering state 8 +Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) -> $$ = nterm exp (13.1-12: 64) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 17 -Cleanup: popping token "end of input" (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1413: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 +./calc.at:1437: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) -> $$ = nterm exp (1.1-13: 7) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = 
nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) -Entering state 21 +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) -Entering state 22 +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) -Entering state 31 +Entering state 30 +Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) -> $$ = nterm exp (2.1-10: -5) Entering state 8 +Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm 
exp (2.14-15: -5) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) -> $$ = nterm exp (2.1-15: -5) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 +Stack now 0 6 2 10 Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering 
state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) -> $$ = nterm exp (4.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 +Stack now 0 6 4 Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 +Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) -> $$ = nterm exp (5.1-6: 1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 
= token number (5.10: 1) -> $$ = nterm exp (5.10: 1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) -> $$ = nterm exp (5.1-10: 1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 +Stack now 0 6 2 Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 +Stack now 0 6 2 2 Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 +Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' 
(7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) -> $$ = nterm exp (7.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) -> $$ = nterm exp (9.1-5: -1) Entering state 8 +Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) -> $$ = nterm exp (9.1-9: -4) Entering state 8 +Stack now 0 6 8 Next token 
is token '=' (9.11: ) Shifting token '=' (9.11: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) -> $$ = nterm exp (9.1-14: -4) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 +Stack now 0 6 8 19 4 12 Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = 
token '-' (10.8: ) $3 = nterm exp (10.10: 3) -> $$ = nterm exp (10.6-10: -1) Entering state 12 +Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) -> $$ = nterm exp (10.1-11: 2) Entering state 8 +Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) -> $$ = nterm exp (10.1-15: 2) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) -> $$ 
= nterm exp (12.3: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) -> $$ = nterm exp (12.1-5: 256) Entering state 8 +Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) -> $$ = nterm exp (12.1-11: 256) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 +Stack now 0 6 4 12 Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) +Stack now 0 6 4 
12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) -> $$ = nterm exp (13.2-4: 4) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) -> $$ = nterm exp (13.1-7: 64) Entering state 8 +Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) -> $$ = nterm exp (13.1-12: 64) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 17 -Cleanup: popping token "end of input" (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) +input: +stderr: + | (1 + 1) / (1 - 1) +1.7: syntax error, unexpected '=' +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: 1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +1.3: syntax error +stderr: +1.7: syntax error, unexpected '=' +input: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116383,262 +127625,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: | 1 2 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' 
(1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) 1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) +Next token is token number (1.3: 2) 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1409: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -517. calc.at:1409: ok -./calc.at:1411: "$PERL" -pi -e 'use strict; +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116651,20 +127674,28 @@ stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) +Next token is token number (1.3: 2) 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1413: "$PERL" -pi -e 'use strict; +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1431: cat stderr +./calc.at:1437: cat stderr +./calc.at:1434: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116674,9 +127705,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: cat stderr -./calc.at:1413: cat stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116686,105 +127715,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1414: cat stderr -518. calc.at:1411: ok -519. 
calc.at:1413: ok +input: + | (1 + # + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1437: $PREPARSER ./calc input +input: +./calc.at:1440: cat stderr + | error +./calc.at:1434: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1433: cat stderr +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +stderr: +syntax error: invalid character: '#' +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error, unexpected '+' input: | 1//2 -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) - -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +1.1: syntax error stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '/' (1.2: ) Shifting token '/' (1.2: ) -Entering state 23 +Entering state 22 +Stack now 0 8 22 Reading a token Next token is token '/' (1.3: ) 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) +Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... -./calc.at:1434: mv calc.y.tmp calc.y - -./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1414: cat stderr -input: - | error -./calc.at:1414: $PREPARSER ./calc input -stderr: -527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... 
-Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1435: mv calc.y.tmp calc.y - -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1437: mv calc.y.tmp calc.y - -./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1438: mv calc.y.tmp calc.y - -./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1414: "$PERL" -pi -e 'use strict; +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116794,73 +127784,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) 
+Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116870,59 +127822,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -input: - | - | +1 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116932,26 +127832,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1414: cat stderr -./calc.at:1414: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1437: cat stderr +./calc.at:1437: $PREPARSER ./calc /dev/null +./calc.at:1431: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116961,511 +127845,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -441. types.at:139: ok -./calc.at:1414: cat stderr +./calc.at:1434: cat stderr +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1440: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1414: $PREPARSER ./calc input - +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1431: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +./calc.at:1434: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected end of input +stderr: +1.7: syntax error +stderr: +error: null divisor +525. calc.at:1433: ok +input: + | error +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +1.7: syntax error Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117475,242 +127890,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1414: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | (!!) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117720,135 +127901,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... 
-./calc.at:1440: mv calc.y.tmp calc.y - -./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1414: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1437: cat stderr stderr: stdout: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -ne ' +./calc.at:1438: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -117857,125 +127913,8 @@ || /\s$/ # No tabs. || /\t/ - )' calc.c calc.h + )' calc.cc -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line 
(1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -117990,1954 +127929,2075 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: cat stderr +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1434: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is 
token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) -> $$ = nterm exp (1.1-13: 7) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) -Entering state 21 +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) -Entering state 22 +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) -Entering state 31 +Entering state 30 +Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) -> $$ = nterm exp (2.1-10: -5) Entering state 8 +Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' 
(2.12: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) -> $$ = nterm exp (2.1-15: -5) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 +Stack now 0 6 2 10 Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (4.6: ) 
-Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) -> $$ = nterm exp (4.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 +Stack now 0 6 4 Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 +Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token 
number (5.6: 2) -> $$ = nterm exp (5.6: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) -> $$ = nterm exp (5.1-6: 1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) -> $$ = nterm exp (5.1-10: 1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 +Stack now 0 6 2 Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 +Stack now 0 6 2 2 Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 +Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 
102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) -> $$ = nterm exp (7.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) -> $$ = nterm exp (9.1-5: -1) Entering state 8 +Stack now 0 6 8 Next token is 
token '-' (9.7: ) Shifting token '-' (9.7: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) -> $$ = nterm exp (9.1-9: -4) Entering state 8 +Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) -> $$ = nterm exp (9.1-14: -4) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 
12 +Stack now 0 6 8 19 4 12 Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) -> $$ = nterm exp (10.6-10: -1) Entering state 12 +Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) -> $$ = nterm exp (10.1-11: 2) Entering state 8 +Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) -> $$ = nterm exp (10.1-15: 2) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next 
token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) -> $$ = nterm exp (12.1-5: 256) Entering state 8 +Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) -> $$ = nterm exp (12.1-11: 256) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -Next token 
is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 +Stack now 0 6 4 12 Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) -> $$ = nterm exp (13.2-4: 4) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) -> $$ = nterm exp (13.1-7: 64) Entering state 8 +Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) -> $$ = nterm exp (13.1-12: 64) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) 
-Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Cleanup: popping token "end of input" (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (* *) + (*) + (*) -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | + | +1 +./calc.at:1434: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting 
token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) -> $$ = nterm exp (1.1-13: 7) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) -Entering state 21 +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) -Entering state 22 +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) -Entering state 31 +Entering state 30 +Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) -Entering state 30 +Entering state 29 +Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) -> $$ = nterm exp (2.1-10: -5) Entering state 8 +Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -Entering state 19 
+Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) -> $$ = nterm exp (2.1-15: -5) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 +Stack now 0 6 2 10 Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 
(line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) -> $$ = nterm exp (4.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 +Stack now 0 6 4 Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 +Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) -> $$ = nterm exp 
(5.6: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) -> $$ = nterm exp (5.1-6: 1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) -> $$ = nterm exp (5.1-10: 1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 +Stack now 0 6 2 Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 +Stack now 0 6 2 2 Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 +Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 +Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = 
nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) Entering state 8 +Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) -> $$ = nterm exp (7.1-9: -1) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) -> $$ = nterm exp (9.1-5: -1) Entering state 8 +Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' 
(9.7: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) -> $$ = nterm exp (9.1-9: -4) Entering state 8 +Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) -> $$ = nterm exp (9.1-14: -4) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 +Stack now 0 6 8 19 4 12 Reading 
a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) -Entering state 20 +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) -> $$ = nterm exp (10.6-10: -1) Entering state 12 +Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) -> $$ = nterm exp (10.5-11: -1) -Entering state 29 +Entering state 28 +Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) -> $$ = nterm exp (10.1-11: 2) Entering state 8 +Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) -> $$ = nterm exp (10.1-15: 2) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) 
+Shifting token number (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) -> $$ = nterm exp (12.1-5: 256) Entering state 8 +Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) -> $$ = nterm exp (12.1-11: 256) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -Next token is token "number" (13.2: 2) 
-Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 +Stack now 0 6 4 12 Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) -Entering state 24 +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) -Entering state 33 +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) -> $$ = nterm exp (13.2-4: 4) Entering state 12 +Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) -> $$ = nterm exp (13.1-5: 4) Entering state 8 +Stack now 0 6 8 Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) -Entering state 24 +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) -Entering state 33 +Entering state 32 +Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) -> $$ = nterm exp (13.1-7: 64) Entering state 8 +Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -Entering state 19 +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Entering state 27 +Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) -> $$ = nterm exp (13.1-12: 64) Entering state 8 +Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) -Entering state 18 
-Reducing stack 0 by rule 2 (line 79): +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Cleanup: popping token "end of input" (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: - | 1 2 -./calc.at:1416: $PREPARSER ./calc input -stdout: stderr: +2.1: syntax error +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1438: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: ./check -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +2.1: syntax error +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119950,21 +130010,40 @@ stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) +Next token is token number (1.3: 2) 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1414: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1441: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119974,344 +130053,1087 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr stderr: -./calc.at:1416: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1441: $PREPARSER ./calc input +stdout: + +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -input: - | 1//2 - | 1 + 2 * 3 + !- ++ -./calc.at:1414: $PREPARSER ./calc input -stderr: -./calc.at:1416: $PREPARSER ./calc input -stdout: -stderr: -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = 
nterm exp (1.9: 3) -Entering state 31 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '-' (4.8: ) +Shifting 
token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -Starting parse -Entering state 0 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm 
exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number 
(10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering 
state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120321,459 +131143,1243 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -input: -input: - | 1 + 2 * 3 + !* ++ - | error -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1416: $PREPARSER ./calc input -stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1437: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) 
+Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -stderr: -input: - | (#) + (#) = 2222 -stdout: -./calc.at:1414: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./types.at:139: $PREPARSER ./test -./calc.at:1416: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is 
token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = 
nterm exp (1.1-16: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 
79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' 
(9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 +Stack now 0 6 8 19 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 
6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number 
(12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 +Stack now 0 6 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' 
(1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -======== Testing with C++ standard flags: '' +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1443: mv calc.y.tmp calc.y + +./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1434: cat stderr +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1434: $PREPARSER ./calc /dev/null +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1441: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.1: syntax error +input: +./calc.at:1440: cat stderr + | 1//2 +./calc.at:1438: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.1: syntax error + | 1 2 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +stderr: +./calc.at:1440: $PREPARSER ./calc input +stderr: +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.3: ) Shifting token '=' (1.3: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '=' (1.7: ) 1.7: syntax error, unexpected '=' Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 Error: popping token '=' (1.3: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) +Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1414: "$PERL" -pi -e 'use strict; +Stack now 0 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120783,7 +132389,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120793,220 +132402,344 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr +syntax error +stderr: +Starting parse +Entering 
state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 stderr: -input: stdout: +./calc.at:1435: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1437: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1434: cat stderr +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (1 + #) = 1111 -./types.at:139: ./check -./calc.at:1416: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1//2 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1438: cat stderr stderr: -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1434: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: +./calc.at:1426: cat stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1440: cat stderr + | 1 2 +./calc.at:1435: $PREPARSER ./calc input + | error +./calc.at:1438: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 
1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +1.3: syntax error, unexpected number +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1440: $PREPARSER ./calc input +stderr: + | 1//2 +./calc.at:1426: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +stderr: +stderr: +1.3: syntax error, unexpected number stderr: Starting parse Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token '\n' (1.1-2.0: ) Shifting token '\n' (1.1-2.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 83): +Stack now 0 3 +Reducing stack by rule 3 (line 74): $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token Next token is token '+' (2.1: ) 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) +Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 +syntax error +./calc.at:1437: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: -stdout: -./calc.at:1416: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121016,8 +132749,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $PREPARSER ./test -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1441: cat stderr +stderr: + | (* *) + (*) + (*) +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1435: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1434: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121027,104 +132773,184 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | error + | 1 + 2 * 3 + !- ++ +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1438: cat stderr +input: + | 1//2 +./calc.at:1435: $PREPARSER ./calc input stderr: -./calc.at:1416: cat stderr -./calc.at:1416: $PREPARSER ./calc /dev/null stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (# + 1) = 1111 -./calc.at:1414: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' +input: + | (!!) + (1 2) = 1 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +stderr: +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1440: cat stderr +./calc.at:1438: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1440: $PREPARSER ./calc /dev/null +1.11: syntax error +1.1-16: error: 2222 != 1 Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +stderr: +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1432: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121134,78 +132960,172 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +input: + | error +./calc.at:1426: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1437: cat stderr +stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.8: syntax error: invalid character: '#' +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1435: cat stderr + | 1 + 2 * 3 + !+ ++ +stderr: +syntax error +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1434: cat stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +input: +./calc.at:1440: cat stderr + | error + | + | +1 +./calc.at:1435: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +stderr: +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +stderr: +1.1: syntax error, unexpected invalid token +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 
1111) -Entering state 28 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: cat stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1434: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121215,694 +133135,894 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: cat stderr +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: +1.1: syntax error, unexpected invalid token +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 input: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1437: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 +Stack now 0 4 11 Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) -> $$ = nterm exp (1.1-2: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) -Entering state 30 +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) -> $$ = nterm exp (1.7-11: 2) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) -Entering state 30 +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) -> $$ = nterm exp (1.7-15: 3) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 Shifting token error (1.7-18: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) -> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) -> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.23: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) -> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) -> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 
2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) -Entering state 31 +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) -> $$ = nterm exp (1.33-37: 2) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 Shifting token error (1.33-41: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) -> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) -> $$ = nterm exp (1.1-42: 4444) Entering state 8 +Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) 1.1-46: error: 4444 != 1 -> $$ = nterm exp (1.1-46: 4444) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr +./calc.at:1432: cat stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 +Stack now 0 4 11 Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) -> $$ = nterm exp (1.1-2: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) -Entering state 30 +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) -> $$ = nterm exp (1.7-11: 2) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) -Entering state 30 +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) -> $$ = nterm exp (1.7-15: 3) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 Shifting token error (1.7-18: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) -> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) -> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.23: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) -> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) -> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 
2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) -Entering state 31 +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) -> $$ = nterm exp (1.33-37: 2) Entering state 12 +Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 Shifting token error (1.33-41: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) -> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) -> $$ = nterm exp (1.1-42: 4444) Entering state 8 +Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) 1.1-46: error: 4444 != 1 -> $$ = nterm exp (1.1-46: 4444) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: cat stderr +input: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + #) = 1111 +error: null divisor +./calc.at:1432: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1438: cat stderr +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1441: cat stderr +./calc.at:1437: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | 1 = 2 = 3 +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1438: $PREPARSER ./calc /dev/null +1.6: syntax error: invalid character: '#' +syntax error +input: +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1441: $PREPARSER ./calc input + | (#) + (#) = 2222 +stderr: +./calc.at:1437: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp 
(1.15-18: 1111) -Entering state 28 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +stderr: +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -./types.at:139: ./check -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +syntax error 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 
by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.7: syntax error, unexpected '=' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +input: +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121912,475 +134032,797 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1416: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1414: $PREPARSER ./calc input input: | (!!) 
+ (1 2) = 1 -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1434: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 +Stack now 0 4 5 Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) +Stack now 0 4 Shifting token error (1.2-3: ) Entering state 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) -> $$ = nterm exp (1.1-4: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token "number" (1.11: 2) +Next token is token number (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) -> $$ = nterm exp (1.8-12: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) -> $$ = nterm exp 
(1.1-12: 2222) Entering state 8 +Stack now 0 8 Next token is token '=' (1.14: ) Shifting token '=' (1.14: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) 1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1431: cat stderr +./calc.at:1435: cat stderr +./calc.at:1438: cat stderr +./calc.at:1437: cat stderr stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 +Stack now 0 4 5 Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) +Stack now 0 4 Shifting token error (1.2-3: ) Entering state 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) -> $$ = nterm exp (1.1-4: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token "number" (1.11: 2) +Next token is token number (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) -> $$ = nterm exp (1.8-12: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) -> $$ = nterm exp (1.1-12: 2222) Entering state 8 +Stack now 0 8 Next token is token '=' (1.14: ) Shifting token '=' (1.14: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) 
Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) 1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +./calc.at:1438: $PREPARSER ./calc input + | (1 + #) = 1111 + | + | +1 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1426: cat stderr +./calc.at:1432: cat stderr +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1441: cat stderr +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc /dev/null +./calc.at:1434: cat stderr +./calc.at:1440: cat stderr +stderr: +2.1: syntax error, unexpected '+' stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 
8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 
79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' 
(1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -stdout: -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +input: +input: + | + | +1 +input: +./calc.at:1426: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1440: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1432: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122390,8 +134832,328 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: ./check -./calc.at:1414: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122401,251 +135163,340 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -520. calc.at:1414: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: - | (- *) + (1 2) = 1 - -./calc.at:1416: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 +Stack now 0 4 2 Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): $1 = token '-' (1.2: ) $2 = token error (1.4: ) +Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token "number" (1.12: 2) +Next token is token number (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) -> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) -> $$ = nterm exp (1.1-13: 2222) Entering state 8 +Stack now 0 8 Next token is token '=' (1.15: ) Shifting token '=' (1.15: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) 1.1-17: error: 
2222 != 1 -> $$ = nterm exp (1.1-17: 2222) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.2: syntax error: invalid character: '#' stderr: +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1438: cat stderr +./calc.at:1441: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 +Stack now 0 4 2 Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.4: ) Entering state 9 -Reducing stack 0 by rule 15 (line 115): +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): $1 = token '-' (1.2: ) $2 = token error (1.4: ) +Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token "number" (1.12: 2) +Next token is token number (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) -> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) -> $$ = nterm exp (1.1-13: 2222) Entering state 8 +Stack now 0 8 Next token is token '=' (1.15: ) Shifting token '=' (1.15: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) 1.1-17: error: 
2222 != 1 -> $$ = nterm exp (1.1-17: 2222) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1435: cat stderr +./calc.at:1435: $PREPARSER ./calc /dev/null +input: +input: + | (# + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122655,248 +135506,1361 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr +stderr: + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1434: $PREPARSER ./calc input +input: +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) + (1 2) = 1 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error, unexpected end of input +stderr: +./calc.at:1440: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +stderr: input: +stderr: | (* *) + (*) + (*) -./calc.at:1416: $PREPARSER ./calc input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS Starting parse Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) 
+Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: 
$PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' 
(1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: $PREPARSER ./calc /dev/null +./calc.at:1432: cat stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' 
(1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 +Stack now 0 4 11 Next token is token '*' (1.2: ) Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.10: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.10: ) Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.9-11: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) -> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Stack now 0 8 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.15: ) Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.16: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.16: ) Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) -> $$ = nterm exp (1.1-17: 3333) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +input: +input: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (1 + # + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 +Stack now 0 4 11 Next token is token '*' (1.2: ) Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) -> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.10: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.10: ) Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.9-11: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) -> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Stack now 0 8 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '(' (1.15: ) Shifting token '(' (1.15: ) Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.16: ) Entering state 11 +Stack now 0 8 20 4 11 Next token is token '*' (1.16: ) Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) -> $$ = nterm exp (1.1-17: 3333) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... -./calc.at:1441: mv calc.y.tmp calc.y - -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1438: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +./calc.at:1441: cat stderr +stderr: + | (1 + # + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122906,281 +136870,855 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1416: cat stderr +1.6: syntax error: invalid character: '#' +stderr: + | (- *) + (1 2) = 1 +./calc.at:1438: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1416: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1441: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1434: $PREPARSER ./calc input +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1440: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +stderr: stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number 
(1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1440: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '!' 
(1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ 
/\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: +stderr: +./calc.at:1435: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token "number" (1.9: 3) -Shifting token 
"number" (1.9: 3) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1437: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1416: $PREPARSER ./calc input +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = 
nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1435: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +./calc.at:1432: cat stderr +./calc.at:1437: $PREPARSER ./calc input +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123190,146 +137728,196 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr +./calc.at:1438: cat stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: input: - | 1 + 2 * 3 + !* ++ -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor + | 1 + 2 * 3 + !- ++ +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1441: cat stderr +stderr: +input: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +1.11-17: error: null divisor stderr: + | (* *) + (*) + (*) +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1434: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1432: $PREPARSER ./calc input stderr: +input: +input: +input: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) -Entering state 22 +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) -Entering state 31 +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) -Entering state 21 +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) +Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123339,209 +137927,1154 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: - | (#) + (#) = 2222 -./calc.at:1416: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +stderr: +./calc.at:1441: $PREPARSER ./calc input + | (1 + #) = 1111 +1.11-17: error: null divisor + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Stack now 0 8 20 4 11 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +stderr: +./calc.at:1440: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +528. calc.at:1437: Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp 
(1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + ok +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1435: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1440: $PREPARSER ./calc input +stderr: +./calc.at:1438: cat stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + +stderr: +./calc.at:1434: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1438: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.7: ) - $2 = token error (1.8: ) + $2 = token error (1.1-8: ) $3 = token ')' (1.9: ) -> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) -> $$ = nterm exp (1.1-9: 2222) Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> 
$$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + 
$1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1434: $PREPARSER ./calc input +input: +stderr: + | (* *) + (*) + (*) +./calc.at:1441: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123551,168 +139084,760 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr +input: +stderr: + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1435: cat stderr +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1440: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +input: +stderr: +stderr: + | (* *) + (*) + (*) +./calc.at:1435: $PREPARSER ./calc input input: | (1 + #) = 1111 -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 4 12 20 Reading a token 1.6: syntax error: invalid character: '#' Next token is token error (1.6: ) Error: popping token '+' (1.4: ) +Stack now 0 4 12 Error: popping nterm exp (1.2: 1) +Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) -> $$ = nterm exp (1.1-14: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line 
(1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 4 12 20 Reading a token 1.6: syntax error: invalid character: '#' Next token is token error (1.6: ) Error: popping token '+' (1.4: ) +Stack now 0 4 12 Error: popping nterm exp (1.2: 1) +Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 
1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) -> $$ = nterm exp (1.1-14: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1441: cat stderr +./calc.at:1434: cat stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123722,159 +139847,650 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +input: +./calc.at:1435: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1440: cat stderr stderr: -stdout: -./types.at:139: $PREPARSER ./test -./calc.at:1416: cat stderr +./calc.at:1434: $PREPARSER ./calc input +input: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (#) + (#) = 2222 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1435: $PREPARSER ./calc input stderr: input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +stderr: | (# + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input stderr: -======== Testing with C++ standard flags: '' +./calc.at:1440: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token 1.2: syntax error: invalid character: '#' Next token is token error (1.2: ) -Shifting token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input 
(2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) -> $$ = nterm exp (1.1-14: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +input: +./calc.at:1445: mv calc.y.tmp calc.y + +stderr: stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token 1.2: syntax error: invalid character: '#' Next token is token error (1.2: ) -Shifting token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 
8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !- ++ +./calc.at:1441: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-7: 1111) $2 = token '=' (1.9: ) $3 = nterm exp (1.11-14: 1111) -> $$ = nterm exp (1.1-14: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.15-2.0: ) Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-14: 1111) $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line 
(1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123884,180 +140500,816 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +stderr: +stderr: + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +error: 2222 != 1 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +./calc.at:1438: cat stderr +stderr: +./calc.at:1434: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + #) = 1111 input: | (1 + # + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input +input: +./calc.at:1441: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1434: $PREPARSER ./calc input +input: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: + | (#) + (#) = 2222 stderr: +./calc.at:1441: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 4 12 20 Reading a token 1.6: syntax error: invalid character: '#' Next token is token error (1.6: ) Error: popping token '+' (1.4: ) +Stack now 0 4 12 Error: popping nterm exp (1.2: 1) +Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.1-11: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) -Entering state 28 +Entering state 27 
+Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-11: 1111) $2 = token '=' (1.13: ) $3 = nterm exp (1.15-18: 1111) -> $$ = nterm exp (1.1-18: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.19-2.0: ) Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-18: 1111) $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm 
input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +1.11-17: error: null divisor +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 4 12 20 Reading a token 1.6: syntax error: invalid character: '#' Next token is token error (1.6: ) Error: popping token '+' (1.4: ) +Stack now 0 4 12 Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token 
is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.1-11: 1111) Entering state 8 +Stack now 0 8 Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) -Entering state 19 +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) -> $$ = nterm exp (1.15-18: 1111) -Entering state 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-11: 1111) $2 = token '=' (1.13: ) $3 = nterm exp (1.15-18: 1111) -> $$ = nterm exp (1.1-18: 1111) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.19-2.0: ) Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-18: 1111) $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input 
(2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124067,345 +141319,791 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ 
= nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +stderr: +./calc.at:1438: cat stderr +./calc.at:1426: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: cat stderr +input: +input: input: | (1 + 1) / (1 - 1) -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1441: cat stderr +./calc.at:1426: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr stderr: +stderr: +input: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 
0 4 12 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) -Entering state 30 +Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) -> $$ = nterm exp (1.2-6: 2) Entering state 12 +Stack now 0 4 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) -Entering state 23 +Entering state 22 +Stack now 0 8 22 Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) -Entering state 20 +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 29 +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) -> $$ = nterm exp (1.12-16: 0) Entering state 12 +Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Entering state 31 +Stack now 0 8 22 31 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack by rule 10 (line 93): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) 1.11-17: error: null divisor -> $$ = nterm exp (1.1-17: 2) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack 
now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + #) = 1111 +input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (1 + #) = 1111 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: 
$PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) -Entering state 21 +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) -> $$ = nterm exp (1.6: 1) -Entering state 30 +Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) -> $$ = nterm exp (1.2-6: 2) Entering state 12 +Stack now 0 4 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) -Entering state 23 +Entering state 22 +Stack now 0 8 22 Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) -> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) -Entering state 20 +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 29 +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack by rule 8 (line 91): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) -> $$ = nterm exp (1.12-16: 0) Entering state 12 +Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) -> $$ = nterm exp (1.11-17: 0) -Entering 
state 32 +Entering state 31 +Stack now 0 8 22 31 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): +Reducing stack by rule 10 (line 93): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) 1.11-17: error: null divisor -> $$ = nterm exp (1.1-17: 2) Entering state 8 +Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 78): +Stack now 0 7 +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -521. calc.at:1416: ok - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1443: mv calc.y.tmp calc.y - -./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -stdout: -./calc.at:1431: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1431: $PREPARSER ./calc input -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1431: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | 1//2 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | error -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 
0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124415,16 +142113,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124434,47 +142126,108 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: cat stderr -input: - | - | +1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1432: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -syntax error -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1432: $PREPARSER ./calc input +./calc.at:1438: cat stderr stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = 
nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124484,24 +142237,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1431: cat stderr +./calc.at:1440: cat stderr input: - | 1 2 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc /dev/null -stderr: -stderr: -1.3: syntax error -syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error -syntax error -./calc.at:1432: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1435: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124511,7 +142252,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124521,165 +142262,259 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: cat stderr -input: -./calc.at:1431: cat stderr - | 1//2 -stderr: -./calc.at:1432: $PREPARSER ./calc input -stdout: -./calc.at:1433: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - input: stderr: -1.3: syntax error - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: $PREPARSER ./calc input +./calc.at:1441: cat stderr + | (# + 1) = 1111 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc input +530. 
calc.at:1440: ok +./calc.at:1426: cat stderr stderr: stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -1.3: syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1433: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: cat stderr -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1431: cat stderr -input: -stderr: - | 1 2 -./calc.at:1433: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: - | (!!) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input -1.3: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -stderr: -stderr: -syntax error -error: 2222 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -error: 2222 != 1 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1432: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc input input: - | 1//2 -stderr: -./calc.at:1433: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
stderr: -./calc.at:1431: cat stderr + | (* *) + (*) + (*) +./calc.at:1426: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' stderr: -1.7: syntax error -1.3: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -1.3: syntax error - | (- *) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124689,69 +142524,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error + syntax error -error: 2222 != 1 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: syntax error syntax error -error: 2222 != 1 -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -input: - | - | +1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1433: $PREPARSER ./calc input -stderr: -2.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1431: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124761,123 +142540,209 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr syntax error syntax error syntax error -./calc.at:1432: $PREPARSER ./calc /dev/null -input: -stderr: - | 1 = 2 = 3 -./calc.at:1433: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.7: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1432: cat stderr -stderr: -./calc.at:1433: cat stderr -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | - | +1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + 
$1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: cat stderr input: -2.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1431: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -stderr: -2.1: syntax error -./calc.at:1431: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124887,113 +142752,304 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc /dev/null -./calc.at:1431: cat stderr -./calc.at:1432: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1431: $PREPARSER ./calc input -stderr: -1.1: syntax error -stderr: + | (1 + # + 1) = 1111 +./calc.at:1435: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: cat stderr -./calc.at:1432: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1432: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack 
now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1431: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + #) = 1111 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' 
(1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + # + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125003,22 +143059,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -input: - | (* *) + (*) + (*) -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1432: $PREPARSER ./calc input -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125028,19 +143069,242 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error -1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) 
+Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr +./calc.at:1438: cat stderr stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1431: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax 
error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125051,9 +143315,9 @@ }eg ' expout || exit 77 input: - | (# + 1) = 1111 -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125063,139 +143327,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +529. calc.at:1438: ok stderr: -./calc.at:1433: cat stderr -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1426: cat stderr +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr stderr: -syntax error: invalid character: '#' -./calc.at:1432: cat stderr +1.11-17: error: null divisor +526. calc.at:1434: ok input: - | (- *) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1441: $PREPARSER ./calc input + input: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... | 1 + 2 * 3 + !+ ++ -./calc.at:1432: $PREPARSER ./calc input -stderr: -./calc.at:1431: cat stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -input: -./calc.at:1433: cat stderr - | (1 + # + 1) = 1111 - | 1 + 2 * 3 + !- ++ -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1432: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1434: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc +./calc.at:1446: mv calc.y.tmp calc.y -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1433: $PREPARSER ./calc input -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -syntax error: invalid character: '#' -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -stderr: -input: -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1432: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125206,12 +143356,294 @@ }eg ' expout || exit 77 stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token 
number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: cat stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1431: cat stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) 
+Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125221,112 +143653,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1433: cat stderr - | 1 2 -./calc.at:1434: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1431: $PREPARSER ./calc input -stderr: -input: -1.3: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor - | 1 + 2 * 3 + !+ ++ -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -stderr: -1.3: syntax error -stderr: -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -input: - | (1 + #) = 1111 +527. 
calc.at:1435: ok +./calc.at:1441: cat stderr +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +531. calc.at:1441: ok +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -stderr: -1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: cat stderr -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1431: cat stderr -stderr: -1.3: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1432: cat stderr -stdout: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -523. calc.at:1431: ok -1.3: syntax error -./calc.at:1437: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc +535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... +./calc.at:1448: mv calc.y.tmp calc.y -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... 
+./calc.at:1449: mv calc.y.tmp calc.y + +./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125336,86 +143688,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1433: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1426: cat stderr +537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... +./calc.at:1451: mv calc.y.tmp calc.y -./calc.at:1437: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +523. calc.at:1431: 538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... + ok +./calc.at:1453: mv calc.y.tmp calc.y + +./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y | (#) + (#) = 2222 -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1434: cat stderr -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1437: $PREPARSER ./calc input - | error -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1432: cat stderr -1.3: syntax error, unexpected number -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1433: cat stderr -input: -1.1: syntax error - | (1 + # + 1) = 1111 -./calc.at:1437: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125425,50 +143718,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: $PREPARSER ./calc input -stderr: + +./calc.at:1426: cat stderr +./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS input: -1.6: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + #) = 1111 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: cat stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1426: $PREPARSER ./calc input stderr: - | 1//2 -./calc.at:1434: cat stderr -./calc.at:1437: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125478,95 +143742,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | 1 = 2 = 3 -./calc.at:1434: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1432: cat stderr -stderr: -1.7: syntax error -533. 
calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: mv calc.y.tmp calc.y +539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... +./calc.at:1454: mv calc.y.tmp calc.y -./calc.at:1433: cat stderr -input: -stderr: -./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.7: syntax error - | (1 + 1) / (1 - 1) -./calc.at:1432: $PREPARSER ./calc input -stderr: +./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1426: cat stderr input: -1.11-17: error: null divisor -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (# + 1) = 1111 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.11-17: error: null divisor -1.2: syntax error: invalid character: '#' -input: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1437: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: cat stderr -1.1: syntax error, unexpected invalid token -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: $PREPARSER ./calc input stderr: -input: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -1.1: syntax error, unexpected invalid token -./calc.at:1434: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125576,43 +143765,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: cat stderr -stderr: -2.1: syntax error -./calc.at:1437: cat stderr -./calc.at:1433: cat stderr 524. calc.at:1432: ok + +./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1426: cat stderr input: -input: - | 1 = 2 = 3 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | (1 + # + 1) = 1111 -./calc.at:1433: $PREPARSER ./calc input -stderr: +./calc.at:1426: $PREPARSER ./calc input stderr: -1.7: syntax error, unexpected '=' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1455: mv calc.y.tmp calc.y -1.7: syntax error, unexpected '=' -stderr: -./calc.at:1434: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1434: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125622,35 +143791,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error +./calc.at:1426: cat stderr +./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: + | (1 + 1) / (1 - 1) +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1438: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - +error: null divisor +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -./calc.at:1437: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +error: null divisor +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125660,60 +143811,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: -./calc.at:1434: cat stderr - | - | +1 -./calc.at:1437: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1433: $PREPARSER ./calc input -input: -2.1: syntax error, unexpected '+' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: $PREPARSER ./calc input -stderr: -stderr: -stderr: -2.1: syntax error, unexpected '+' -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -1.11-17: error: null divisor -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1426: cat stderr +522. calc.at:1426: ok + +541. calc.at:1457: testing Calculator C++ parse.error=custom ... +./calc.at:1457: mv calc.y.tmp calc.y + +./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: stdout: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -1.11-17: error: null divisor -./calc.at:1426: "$PERL" -ne ' +./calc.at:1445: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -125722,28 +143829,8 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc calc.hh + )' calc.cc -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -125758,2180 +143845,2092 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: $PREPARSER ./calc input -stderr: +./calc.at:1445: $PREPARSER ./calc input stderr: -./calc.at:1437: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + 
$2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is 
token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token 
'=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) +Next token is token ')' () Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) 
+Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token 
-Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 
Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) +Next token is token '-' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm 
exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2) +-> $$ 
= nterm exp (2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - 
$2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) +Next token is token ')' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp 
(4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1437: $PREPARSER ./calc /dev/null -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1434: cat stderr -./calc.at:1433: cat stderr -stderr: -./calc.at:1446: mv calc.y.tmp calc.y - -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) 
--> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 
Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 
6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) +Next token is token ')' () Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting 
token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) 
--> $$ = nterm exp (7.4: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number 
(9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) +Next token is token '-' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing 
stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 
8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 Stack now 0 6 8 
23 32 -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) +Next token is token ')' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - 
$3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.1: syntax error, unexpected end of input -input: - | (!!) + (1 2) = 1 -./calc.at:1434: $PREPARSER ./calc input -525. 
calc.at:1433: input: - ok -stderr: - | 1 2 -./calc.at:1426: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -stderr: | 1 2 -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: +./calc.at:1445: $PREPARSER ./calc input stderr: - Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token number (2) Stack now 0 -syntax error -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token number (2) Stack now 0 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1437: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -127941,137 +145940,109 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1445: cat stderr stderr: -./calc.at:1434: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1438: cat stderr -./calc.at:1426: cat stderr -input: +stdout: +./calc.at:1454: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + input: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 | 1//2 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1437: cat stderr -stderr: +./calc.at:1445: $PREPARSER ./calc input input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1454: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '/' () +Shifting token '/' () Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '/' () Stack now 0 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1426: $PREPARSER ./calc input - | (!!) 
+ (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '/' () +Shifting token '/' () Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '/' () Stack now 0 +input: + | 1 2 +./calc.at:1454: $PREPARSER ./calc input stderr: +1.3: syntax error, unexpected number +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -syntax error -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected number +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128081,11 +146052,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... 
-./calc.at:1448: mv calc.y.tmp calc.y - -./calc.at:1438: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1445: cat stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128095,76 +146063,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: cat stderr -./calc.at:1437: cat stderr +./calc.at:1454: cat stderr input: -./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y | error -./calc.at:1438: $PREPARSER ./calc input -input: -input: - | (- *) + (1 2) = 1 - | (* *) + (*) + (*) -./calc.at:1434: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: -./calc.at:1437: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () Stack now 0 -./calc.at:1426: cat stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error + | 1//2 +./calc.at:1454: $PREPARSER ./calc input stderr: stderr: -./calc.at:1426: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () Stack now 0 -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128174,7 +146104,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128184,127 +146114,124 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: cat stderr -./calc.at:1437: cat stderr +./calc.at:1454: cat stderr +input: +./calc.at:1445: cat stderr + | error +./calc.at:1454: $PREPARSER ./calc input stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: +stderr: | 1 = 2 = 3 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1440: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1434: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: +stderr: +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Error: popping token '=' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '=' () Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +1.1: syntax error, unexpected invalid token stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Error: popping token '=' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '=' () Stack now 0 +input: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -128318,123 +146245,11 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | 1 + 2 * 3 + !+ ++ -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1440: $PREPARSER ./calc input -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1454: cat stderr input: | 1 = 2 = 3 -./calc.at:1426: $PREPARSER ./calc input -stderr: -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -syntax error -./calc.at:1438: cat stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -input: -syntax error - | 1 + 2 * 3 + !- ++ - | - | +1 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1437: cat stderr -stderr: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1437: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 +./calc.at:1454: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -128444,7 +146259,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -128459,7 +146274,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -128474,14 +146289,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -128489,7 +146306,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -128498,6 
+146315,7 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -128505,14 +146323,16 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -128521,15 +146341,16 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -128539,7 +146360,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -128554,7 +146375,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -128574,21 +146395,23 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): +Reducing stack by rule 9 (line 105): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -128596,7 +146419,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -128605,6 +146428,7 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -128617,21 +146441,23 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 
5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -128640,15 +146466,16 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -128659,12 +146486,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -128680,7 +146507,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -128695,14 +146522,16 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -128710,7 +146539,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -128718,6 +146547,7 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -128730,21 +146560,23 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -128753,15 +146585,16 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 
4 (line 88): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -128782,14 +146615,16 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 S26 +Reducing stack by rule 11 (line 115): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -128797,9 +146632,10 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -128816,14 +146652,16 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -128832,6 +146670,7 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -128839,14 +146678,16 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -128855,15 +146696,16 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -128874,12 +146716,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -128905,28 +146747,30 @@ Shifting token number (7.4: 1) Entering state 1 Stack 
now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -128934,6 +146778,7 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -128946,21 +146791,23 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -128969,15 +146816,16 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -128988,12 +146836,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -129004,7 +146852,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -129019,14 +146867,16 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '-' +LAC: 
checking lookahead '-': R8 G8 S19 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -129035,6 +146885,7 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) +LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -129042,14 +146893,16 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -129058,6 +146911,7 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129070,21 +146924,23 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -129093,15 +146949,16 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -129112,7 +146969,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -129132,7 +146989,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -129147,14 +147004,16 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -129163,9 +147022,10 @@ Stack now 0 
6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -129174,7 +147034,9 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -129183,6 +147045,7 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129190,14 +147053,16 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -129206,15 +147071,16 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -129225,12 +147091,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -129241,7 +147107,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -129256,7 +147122,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -129271,14 +147137,16 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -129286,7 +147154,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token 
is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -129295,6 +147163,7 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129302,14 +147171,16 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -129318,15 +147189,16 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -129342,7 +147214,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -129357,14 +147229,16 @@ Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -129373,9 +147247,10 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -129392,14 +147267,16 @@ Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -129408,6 +147285,7 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129415,14 +147293,16 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 
5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) @@ -129431,35 +147311,31 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) +Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -129469,6 +147345,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1445: cat stderr Starting parse Entering state 0 Stack now 0 @@ -129477,7 +147358,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -129492,7 +147373,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -129507,14 +147388,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -129522,7 +147405,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -129531,6 +147414,7 @@ Stack 
now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -129538,14 +147422,16 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -129554,15 +147440,16 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -129572,7 +147459,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -129587,7 +147474,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -129607,21 +147494,23 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): +Reducing stack by rule 9 (line 105): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -129629,7 +147518,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -129638,6 +147527,7 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129650,21 +147540,23 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm 
exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -129673,15 +147565,16 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -129692,12 +147585,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -129713,7 +147606,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -129728,14 +147621,16 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -129743,7 +147638,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -129751,6 +147646,7 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129763,21 +147659,23 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -129786,15 +147684,16 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = 
nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -129815,14 +147714,16 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 S26 +Reducing stack by rule 11 (line 115): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -129830,9 +147731,10 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -129849,14 +147751,16 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -129865,6 +147769,7 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129872,14 +147777,16 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -129888,15 +147795,16 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -129907,12 +147815,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -129938,28 +147846,30 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 
-Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -129967,6 +147877,7 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -129979,21 +147890,23 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -130002,15 +147915,16 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -130021,12 +147935,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -130037,7 +147951,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -130052,14 +147966,16 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '-' +LAC: checking lookahead 
'-': R8 G8 S19 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -130068,6 +147984,7 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) +LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -130075,14 +147992,16 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -130091,6 +148010,7 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -130103,21 +148023,23 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -130126,15 +148048,16 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -130145,7 +148068,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -130165,7 +148088,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -130180,14 +148103,16 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -130196,9 +148121,10 @@ Stack now 0 6 8 19 4 12 Next 
token is token ')' (10.11: ) Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -130207,7 +148133,9 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -130216,6 +148144,7 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -130223,14 +148152,16 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -130239,15 +148170,16 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -130258,12 +148190,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -130274,7 +148206,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -130289,7 +148221,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -130304,14 +148236,16 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -130319,7 +148253,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' 
(12.7: ) -Reducing stack by rule 12 (line 103): +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -130328,6 +148262,7 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -130335,14 +148270,16 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -130351,15 +148288,16 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -130375,7 +148313,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -130390,14 +148328,16 @@ Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -130406,9 +148346,10 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -130425,14 +148366,16 @@ Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -130441,6 +148384,7 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -130448,14 +148392,16 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): 
+Reducing stack by rule 5 (line 92): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) @@ -130464,29 +148410,67 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) +Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +input: + | 1 2 +./calc.at:1455: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1445: $PREPARSER ./calc input +1.7: syntax error, unexpected '=' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Stack now 0 +Cleanup: discarding lookahead token '+' () +Stack now 0 +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130496,57 +148480,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1426: cat stderr -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1438: cat stderr -input: -./calc.at:1434: cat stderr - | - | +1 -input: -./calc.at:1426: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1438: $PREPARSER ./calc /dev/null -./calc.at:1437: $PREPARSER ./calc input -stderr: -syntax error -input: -stderr: -stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next 
token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) Stack now 0 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -stderr: -./calc.at:1440: $PREPARSER ./calc input -stderr: -stderr: -syntax error +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Stack now 0 +Cleanup: discarding lookahead token '+' () Stack now 0 +./calc.at:1454: cat stderr Starting parse Entering state 0 Stack now 0 @@ -130555,23 +148547,65 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token Next token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | + | +1 +./calc.at:1454: $PREPARSER ./calc input +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: cat stderr +stderr: +./calc.at:1445: cat stderr +2.1: syntax error, unexpected '+' +input: + | 1//2 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc /dev/null stderr: stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -130580,19 +148614,52 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1437: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130602,7 +148669,61 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1454: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or 
'(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +./calc.at:1454: $PREPARSER ./calc /dev/null +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130612,7 +148733,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: "$PERL" -pi -e 'use strict; +stderr: +1.1: syntax error, unexpected end of file +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +input: +stderr: + | error +1.1: syntax error, unexpected end of file +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130622,8 +148752,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130633,7 +148788,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1443: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1445: cat stderr +./calc.at:1454: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130643,715 +148837,664 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1440: cat stderr input: -./calc.at:1437: $PREPARSER ./calc input | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1426: cat stderr -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: cat stderr -stderr: -./calc.at:1426: $PREPARSER ./calc /dev/null -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: +./calc.at:1455: cat stderr +./calc.at:1454: $PREPARSER ./calc input stderr: -syntax error - | 1//2 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | (1 + #) = 1111 -./calc.at:1434: $PREPARSER ./calc input stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) +Error: popping nterm exp (3) Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token 
'+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '*' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) +Error: popping nterm exp (2) Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token 
'/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: + | 1 = 2 = 3 +./calc.at:1455: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) 
--> $$ = nterm exp (1.7-11: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) +Error: popping nterm exp (3) Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '*' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting 
token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) +Error: popping nterm exp (2) Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () 
+stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +stderr: Starting parse Entering state 0 Stack now 0 @@ -131360,29 +149503,49 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1437: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131392,7 +149555,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131402,7 +149565,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: 
checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1445: cat stderr +./calc.at:1454: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131413,363 +149628,311 @@ }eg ' expout || exit 77 input: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1440: cat stderr -./calc.at:1438: cat stderr -stderr: -./calc.at:1426: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1434: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1443: $PREPARSER ./calc input input: -stderr: - | error -./calc.at:1440: $PREPARSER ./calc input | (!!) + (1 2) = 1 -./calc.at:1438: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -input: +./calc.at:1455: cat stderr +./calc.at:1454: $PREPARSER ./calc input + | (!!) + (1 2) = 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1426: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: $PREPARSER ./calc input input: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1434: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1455: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) +Next token is token '!' () +Shifting token '!' () Entering state 15 Stack now 0 4 5 15 Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) + $1 = token '!' () + $2 = token '!' () Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm 
exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -1.2: syntax error: invalid character: '#' -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: 
) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -131779,513 +149942,1066 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: cat stderr -./calc.at:1440: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1438: cat stderr -1.2: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: $PREPARSER ./calc input -stderr: -input: -stderr: -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 
24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax 
error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1434: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | (1 + # + 1) = 1111 -./calc.at:1434: $PREPARSER ./calc input -stderr: -input: - | (!!) 
+ (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 -Stack now 0 4 2 +Stack now 0 6 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token 
is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 
5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 
+Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 
+Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + 
$1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: $PREPARSER ./calc input -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1) +Shifting token number (1) Entering 
state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error -error: 2222 != 1 -stderr: -./calc.at:1440: cat stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1437: cat stderr -stderr: -syntax error -error: 2222 != 1 -input: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1437: $PREPARSER ./calc input - | - | +1 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: -./calc.at:1438: cat stderr Starting parse Entering state 0 Stack now 0 @@ -132294,26 +151010,37 @@ Shifting token '\n' (1.1-2.0: ) Entering state 3 Stack now 0 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132323,55 +151050,1137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -input: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (* *) + (*) + (*) -./calc.at:1438: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: - | (1 + 1) / (1 - 1) -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1426: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a 
token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' 
(2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 
10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 
0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting 
token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 
23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = 
nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | (- *) + (1 2) = 1 +./calc.at:1454: $PREPARSER ./calc input + | 1 2 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1455: cat stderr stderr: stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1445: cat stderr +./calc.at:1455: $PREPARSER ./calc /dev/null +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1445: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132381,420 +152190,364 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.11-17: error: null divisor -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr -./calc.at:1437: cat stderr -stderr: -input: +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: cat stderr +./calc.at:1454: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (- *) + (1 2) = 1 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1440: $PREPARSER ./calc /dev/null -stderr: -input: -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -1.11-17: error: null divisor -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -stderr: -1.11-17: error: null divisor -./calc.at:1438: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -526. calc.at:1434: ok -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token end of input () +Cleanup: popping nterm input () input: -./calc.at:1440: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1437: cat stderr -./calc.at:1426: cat stderr + | (* *) + (*) + (*) input: - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1454: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+ | 1//2 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr stderr: -./calc.at:1440: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -132809,77 +152562,28 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -528. calc.at:1437: input: - ok +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 stderr: - | (* *) + (*) + (*) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1455: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -132894,82 +152598,32 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: cat stderr stderr: input: -stderr: -syntax error -syntax error -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - Starting parse Entering state 0 Stack now 0 @@ -132980,7 +152634,23 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -132988,7 +152658,7 @@ Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -133010,7 +152680,7 @@ Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 @@ -133025,14 +152695,16 @@ Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -133041,6 +152713,7 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token @@ -133048,14 +152721,16 @@ Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -133064,15 +152739,32 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err 
+LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' (1.17: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133080,7 +152772,7 @@ Shifting token ')' (1.18: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -133089,7 +152781,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -133098,6 +152792,7 @@ Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -133107,30 +152802,55 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.23: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.25: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.27: ) Error: popping token error (1.23-25: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133139,7 +152859,7 @@ Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -133148,7 +152868,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -133157,6 +152879,7 @@ Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -133169,7 +152892,7 @@ Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 @@ -133184,14 +152907,26 @@ Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) +Reducing stack by rule 5 (line 92): + $./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -133200,22 +152935,42 @@ Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift Entering state 21 Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' (1.39: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.41: ) Error: popping token error (1.33-41: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133224,7 +152979,7 @@ Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -133233,7 +152988,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -133242,6 +152999,7 @@ Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -133249,14 +153007,16 @@ Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -133266,35 +153026,34 @@ Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = 
nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1454: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1438: $PREPARSER ./calc input stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -133305,7 +153064,23 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -133313,7 +153088,7 @@ Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -133335,7 +153110,7 @@ Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 @@ -133350,14 +153125,16 @@ Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -133366,6 +153143,7 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token @@ -133373,14 +153151,16 @@ Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -133389,15 +153169,32 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' (1.17: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133405,7 +153202,7 @@ Shifting token ')' (1.18: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -133414,7 +153211,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -133423,6 +153222,7 @@ Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -133432,30 +153232,55 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.23: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.25: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.27: ) Error: popping token error (1.23-25: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133464,7 +153289,7 @@ Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -133473,7 +153298,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -133482,6 +153309,7 @@ Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) +LAC: 
initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -133494,7 +153322,7 @@ Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 @@ -133509,14 +153337,16 @@ Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -133525,22 +153355,42 @@ Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift Entering state 21 Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' (1.39: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.41: ) Error: popping token error (1.33-41: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133549,7 +153399,7 @@ Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -133558,7 +153408,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -133567,6 +153419,7 @@ Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -133574,14 +153427,16 @@ Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack 
by rule 6 (line 93): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -133591,202 +153446,34 @@ Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1443: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | error +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133796,7 +153483,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133806,11 +153493,84 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: -./calc.at:1438: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1445: cat stderr +./calc.at:1443: $PREPARSER ./calc input +stdout: +./calc.at:1453: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1455: cat stderr +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1448: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (!!) + (1 2) = 1 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1426: cat stderr +./calc.at:1455: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (* *) + (*) + (*) stderr: Starting parse Entering state 0 @@ -133830,7 +153590,7 @@ Shifting token '!' (1.3: ) Entering state 15 Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): +Reducing stack by rule 16 (line 120): $1 = token '!' (1.2: ) $2 = token '!' (1.3: ) Stack now 0 4 @@ -133842,7 +153602,7 @@ Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -133864,23 +153624,36 @@ Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.11: 2) Error: popping token error (1.9-11: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 @@ -133889,7 +153662,7 @@ Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -133898,7 +153671,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.14: 
) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -133907,6 +153682,7 @@ Stack now 0 8 Next token is token '=' (1.14: ) Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -133914,14 +153690,16 @@ Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) @@ -133931,34 +153709,221 @@ Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -input: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ - | (#) + (#) = 2222 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1426: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1448: $PREPARSER ./calc input +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: cat stderr stderr: Starting parse Entering state 0 @@ -133978,7 +153943,7 @@ Shifting token '!' (1.3: ) Entering state 15 Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): +Reducing stack by rule 16 (line 120): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Stack now 0 4 @@ -133990,7 +153955,7 @@ Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -134012,23 +153977,36 @@ Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.11: 2) Error: popping token error (1.9-11: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 @@ -134037,7 +154015,7 @@ Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -134046,7 +154024,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -134055,6 +154035,7 @@ Stack now 0 8 Next token is token '=' (1.14: ) Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -134062,14 +154043,16 @@ Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-12: 2222) $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) @@ -134079,156 +154062,193 @@ Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 
Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | 1 2 +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1453: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1454: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected number +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134238,139 +154258,142 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... 
stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +1.3: syntax error, unexpected number + | 1 = 2 = 3 + | 1 2 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.3: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +stderr: +./calc.at:1453: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1449: mv calc.y.tmp calc.y - -./calc.at:1440: cat stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1454: cat stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 + 2 * 3 + !- ++ -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134381,17 +154404,27 @@ }eg ' expout || exit 77 input: -./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... -stderr: -./calc.at:1451: mv calc.y.tmp calc.y - | (- *) + (1 2) = 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input + | 1//2 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1445: cat stderr + | (1 + #) = 1111 stderr: -./calc.at:1438: cat stderr -./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1454: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -134408,11 +154441,27 @@ Stack now 0 4 2 Reading a token Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): +Reducing stack by rule 15 (line 119): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -134420,9 +154469,12 @@ Entering state 11 Stack now 0 4 11 Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 @@ -134431,7 +154483,7 @@ Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -134453,23 +154505,36 @@ Shifting token number (1.10: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 @@ -134478,7 +154543,7 @@ Shifting token ')' (1.13: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -134487,7 +154552,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -134496,6 +154563,7 @@ Stack now 0 8 Next token is token '=' (1.15: ) Shifting token '=' (1.15: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -134503,14 +154571,16 @@ Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -134520,31 +154590,39 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) +LAC: 
initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +stderr: +stderr: input: - | (1 + #) = 1111 +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1445: $PREPARSER ./calc input +input: Starting parse Entering state 0 Stack now 0 @@ -134560,11 +154638,27 @@ Stack now 0 4 2 Reading a token Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): +Reducing stack by rule 15 (line 119): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -134572,9 +154666,12 @@ Entering state 11 Stack now 0 4 11 Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 @@ -134583,7 +154680,7 @@ Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -134605,23 +154702,36 @@ Shifting token number (1.10: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err Error: discarding token number (1.12: 2) Error: popping token error (1.10-12: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 @@ -134630,7 +154740,7 @@ Shifting token ')' (1.13: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -134639,7 +154749,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -134648,6 +154760,7 @@ Stack now 0 8 Next token is token '=' (1.15: ) Shifting token '=' (1.15: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -134655,14 +154768,16 @@ Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-13: 2222) $2 = token '=' (1.15: ) $3 = nterm exp (1.17: 1) @@ -134672,122 +154787,143 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) +LAC: 
initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: $PREPARSER ./calc input +./calc.at:1448: cat stderr stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (3) +-> $$ = nterm exp (3) 
+Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1443: $PREPARSER ./calc input + | 1//2 +./calc.at:1448: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1449: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -134797,15 +154933,14 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.6: syntax error: invalid character: '#' +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134815,91 +154950,122 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token 
is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.3: syntax error +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -134909,18 +155075,39 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1426: cat stderr - | (* *) + (*) + (*) -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1449: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1445: $PREPARSER ./calc input +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr +stderr: +1.3: syntax error +./calc.at:1455: cat stderr +./calc.at:1454: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134931,9 +155118,101 @@ }eg ' expout || exit 77 input: - | (#) + (#) = 2222 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1438: cat stderr +input: +stderr: + | error +stderr: +./calc.at:1453: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 
1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: $PREPARSER ./calc input stderr: stderr: Starting parse @@ -134946,22 +155225,44 @@ Stack now 0 4 Reading a token Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 @@ -134970,7 +155271,7 @@ Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -134989,14 +155290,33 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.10: ) Error: popping token error (1.10: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 @@ -135005,7 +155325,7 @@ Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -135014,7 +155334,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -135023,6 +155345,7 @@ Stack now 0 8 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -135032,14 +155355,33 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.16: 
syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.16: ) Error: popping token error (1.16: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 @@ -135048,7 +155390,7 @@ Shifting token ')' (1.17: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -135057,7 +155399,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -135066,37 +155410,127 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) input: stderr: - | (# + 1) = 1111 stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' + | 1 2 +./calc.at:1443: cat stderr Starting parse Entering state 0 Stack now 0 @@ -135107,22 +155541,44 @@ Stack now 0 4 Reading a token Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 @@ -135131,7 +155587,7 @@ Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -135150,14 +155606,33 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.10: ) Error: popping token error (1.10: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 @@ -135166,7 +155641,7 @@ Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -135175,7 +155650,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -135184,6 +155661,7 @@ Stack now 0 8 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -135193,14 +155671,33 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 1.16: 
syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err Error: discarding token '*' (1.16: ) Error: popping token error (1.16: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 @@ -135209,7 +155706,7 @@ Shifting token ')' (1.17: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -135218,7 +155715,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -135227,138 +155726,259 @@ Stack now 0 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: $PREPARSER ./calc input +./calc.at:1449: $PREPARSER ./calc input +1.1: syntax error, unexpected invalid token +stderr: +./calc.at:1443: $PREPARSER ./calc /dev/null +./calc.at:1448: cat stderr +1.2: syntax error: invalid character: '#' +stderr: +1.3: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1445: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error +1.1: syntax error +input: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (#) + (#) = 2222 +./calc.at:1445: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1454: cat stderr +./calc.at:1453: cat stderr +stderr: +1.1: syntax error +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Stack now 0 4 -Shifting token error (1.1-4: ) +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = 
nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1454: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1453: $PREPARSER ./calc input stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1455: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135372,103 +155992,128 @@ Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) 
-Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135478,9 +156123,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: cat stderr stderr: -./calc.at:1438: cat stderr +./calc.at:1449: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -135489,7 +156137,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -135504,7 +156152,7 @@ Shifting token number (1.5: 2) Entering state 1 
Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -135519,14 +156167,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -135534,7 +156184,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -135543,6 +156193,7 @@ Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -135555,19 +156206,25 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Reducing stack by rule 17 (line 121): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr input: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1426: $PREPARSER ./calc input -stderr: -./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + | 1//2 +./calc.at:1449: $PREPARSER ./calc input input: +stderr: +stderr: +1.7: syntax error, unexpected '=' +stderr: +1.6: syntax error: invalid character: '#' +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 Starting parse Entering state 0 Stack now 0 @@ -135576,7 +156233,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -135591,7 +156248,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -135606,14 +156263,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -135621,7 +156280,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -135630,6 +156289,7 @@ Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 Stack 
now 0 8 20 Reading a token @@ -135642,24 +156302,23 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Reducing stack by rule 17 (line 121): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1443: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 +./calc.at:1448: $PREPARSER ./calc input input: -./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -syntax error: invalid character: '#' | 1 + 2 * 3 + !- ++ -./calc.at:1440: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -135670,96 +156329,655 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 -Stack now 0 4 12 20 +Stack now 0 8 20 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr +stderr: +1.7: syntax error +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error (1.2-10: ) +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' 
(1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -135776,8 +156994,6 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -135786,7 +157002,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -135801,7 +157017,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -135816,14 +157032,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) 
-Reducing stack by rule 9 (line 92): +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -135831,7 +157049,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -135840,6 +157058,7 @@ Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -135852,15 +157071,37 @@ Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): +Reducing stack by rule 18 (line 122): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +input: +input: + | (1 + #) = 1111 +./calc.at:1445: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +./calc.at:1454: $PREPARSER ./calc input + | + | +1 +./calc.at:1453: $PREPARSER ./calc input +1.7: syntax error Starting parse Entering state 0 Stack now 0 @@ -135869,7 +157110,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -135884,7 +157125,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -135899,14 +157140,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -135914,7 +157157,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -135923,6 +157166,7 @@ Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -135935,15 +157179,13 @@ Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): +Reducing stack by rule 18 (line 122): $1 = token '!' 
(1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stdout: -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135953,133 +157195,247 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +stderr: +stderr: +1.11-17: error: null divisor +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a 
token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1449: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1443: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1455: cat stderr +stderr: +stderr: +input: + | (!!) + (1 2) = 1 +./calc.at:1443: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +1.11-17: error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Stack now 0 4 -Shifting token error (1.2-8: ) +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - 
$1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136089,7 +157445,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136099,33 +157455,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1448: cat stderr +stderr: input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1440: cat stderr -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1426: cat stderr -./calc.at:1438: cat stderr -input: -input: - | (1 + 1) / (1 - 1) | (#) + (#) = 2222 -./calc.at:1438: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1440: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -136136,122 +157472,282 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 -Stack now 0 4 12 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 4 12 20 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack 
now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | + | +1 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1454: cat stderr +./calc.at:1453: cat stderr +1.1: syntax error +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering 
state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -136268,11 +157764,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +2.1: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -136288,9 +157782,12 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.2: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 @@ -136299,7 +157796,7 @@ Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.1-2: ) $3 = token ')' (1.3: ) @@ -136323,9 +157820,12 @@ Entering state 11 Stack now 0 8 20 4 11 Next token is token invalid token (1.8: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.8: ) Error: popping token error (1.1-8: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 @@ -136334,7 +157834,7 @@ Shifting token ')' (1.9: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing 
stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.7: ) $2 = token error (1.1-8: ) $3 = token ')' (1.9: ) @@ -136343,7 +157843,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -136352,6 +157854,7 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -136359,14 +157862,16 @@ Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -136375,172 +157880,34 @@ Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next 
token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $PREPARSER ./calc /dev/null +./calc.at:1445: cat stderr stderr: +539. 
calc.at:1454: ok + Starting parse Entering state 0 Stack now 0 @@ -136556,9 +157923,12 @@ Entering state 11 Stack now 0 4 11 Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.2: ) Error: popping token error (1.1-2: ) Stack now 0 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 @@ -136567,7 +157937,7 @@ Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.1: ) $2 = token error (1.1-2: ) $3 = token ')' (1.3: ) @@ -136591,9 +157961,12 @@ Entering state 11 Stack now 0 8 20 4 11 Next token is token invalid token (1.8: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err Error: discarding token invalid token (1.8: ) Error: popping token error (1.1-8: ) Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 @@ -136602,7 +157975,7 @@ Shifting token ')' (1.9: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack by rule 14 (line 118): $1 = token '(' (1.7: ) $2 = token error (1.1-8: ) $3 = token ')' (1.9: ) @@ -136611,7 +157984,9 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -136620,6 +157995,7 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -136627,14 +158003,16 @@ Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) $3 = nterm exp (1.13-16: 2222) @@ -136643,29 +158021,32 @@ Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -./calc.at:1438: "$PERL" -pi -e 'use strict; +stderr: +2.1: syntax error +stderr: +./calc.at:1443: 
"$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136675,7 +158056,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of input +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (# + 1) = 1111 +1.1: syntax error, unexpected end of input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1455: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136690,1123 +158109,1357 @@ Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = 
nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +./calc.at:1448: cat stderr + | (1 + #) = 1111 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +stderr: +./calc.at:1448: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 
Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> 
$$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1453: cat stderr +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1453: $PREPARSER ./calc input +stdout: +./calc.at:1457: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1443: $PREPARSER ./calc input +542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1458: mv calc.y.tmp calc.y + +./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1449: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Stack 
now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) 
+Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.1: syntax error + | 1 = 2 = 3 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 -Stack now 0 6 2 2 +Stack now 0 4 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing 
stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 +Stack now 0 8 18 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +1.7: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = 
token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 +Stack now 0 8 18 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) 
-Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.7: syntax error +./calc.at:1445: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 2 +./calc.at:1457: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1455: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1453: cat stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1443: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 +Stack now 0 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 19 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 6 8 19 4 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 
79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +input: +./calc.at:1448: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 +Next token is token '+' 
(1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Next token is token number (1.6: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' 
(1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (!!) + (1 2) = 1 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) +stderr: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 +Stack now 0 4 12 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token number 
(1) +Error: discarding token number (1) +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: cat stderr -./calc.at:1440: cat stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Reading a token +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -137816,43 +159469,1906 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 +Next token is token '+' (1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Next token is token number (1.6: 1) +LAC: initial context 
established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | + | +1 +stderr: +./calc.at:1449: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1457: cat stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +2.1: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1//2 +./calc.at:1457: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1451: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1451: $PREPARSER ./calc input +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: + | (1 + # + 1) = 1111 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1443: cat stderr +stderr: +input: +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1449: cat stderr +stderr: + | (- *) + (1 2) = 1 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1448: cat stderr +./calc.at:1449: $PREPARSER ./calc /dev/null +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1457: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering 
state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1445: $PREPARSER ./calc input +input: +1.1: syntax error +stderr: +input: +input: + | 1 + 2 * 3 + !+ ++ +stderr: + | 1 2 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 
+Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | error +stderr: +stderr: +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' () +Reducing stack by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () 
+Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +1.3: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' () +Reducing stack by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1455: "$PERL" -pi -e 'use 
strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: cat stderr +./calc.at:1453: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1449: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1443: $PREPARSER ./calc input +input: +stderr: + | (* *) + (*) + (*) +./calc.at:1453: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 +Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) 
+Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1457: cat stderr +stdout: +./calc.at:1446: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1448: cat stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +stderr: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 = 2 = 3 +input: +./calc.at:1457: $PREPARSER ./calc input +input: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 
+Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1//2 + | (- *) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input +stderr: +./calc.at:1451: $PREPARSER ./calc input +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1446: $PREPARSER ./calc input +stderr: +./calc.at:1445: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next 
token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): @@ -138734,17 +162250,1561 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr -529. calc.at:1438: ok +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1443: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = 
token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) 
+Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 
103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token 
'\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering 
state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing 
stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by 
rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.3: syntax error +./calc.at:1455: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1453: cat stderr input: -./calc.at:1426: cat stderr - | (1 + #) = 1111 -./calc.at:1440: $PREPARSER ./calc input input: | 1 2 -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr +stderr: +540. calc.at:1455: ok +./calc.at:1457: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1449: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number 
(1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +./calc.at:1448: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 + | + | +1 +./calc.at:1457: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +input: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +input: + | error +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1448: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' 
+Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +1.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1446: cat stderr +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... +./calc.at:1459: mv calc.y.tmp calc.y + +./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | 1//2 +./calc.at:1446: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1443: cat stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1457: cat stderr + | (1 + #) = 1111 +./calc.at:1449: cat stderr +./calc.at:1443: $PREPARSER ./calc input +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: $PREPARSER ./calc /dev/null +input: stderr: Starting parse Entering state 0 @@ -138845,33 +163905,30 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1426: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1448: cat stderr +stderr: stderr: stderr: stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1453: cat stderr Starting parse Entering state 0 Stack now 0 @@ -138971,8 +164028,6 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -138987,16 +164042,34 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +input: +./calc.at:1448: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1451: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' - -./calc.at:1440: "$PERL" -pi -e 'use strict; +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +input: +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139006,7 +164079,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.7: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1453: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139016,8 +164100,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139027,16 +164114,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: cat stderr +1.7: syntax error +stderr: +./calc.at:1443: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1457: cat stderr +input: input: + | error | (# + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input input: - | 1//2 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1426: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1448: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1449: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -139133,37 +164264,49 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1453: cat stderr +stderr: stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1451: cat stderr Starting parse Entering state 0 Stack now 0 @@ -139260,7 +164403,104 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1451: $PREPARSER ./calc input +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1448: cat stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +input: +./calc.at:1457: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +stderr: +./calc.at:1443: cat stderr +./calc.at:1448: $PREPARSER ./calc input +input: +2.1: syntax error + | 1 = 2 = 3 +./calc.at:1446: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 +stderr: + | (1 + # + 1) = 1111 +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: $PREPARSER ./calc input stderr: +./calc.at:1449: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1453: cat stderr Starting parse Entering state 0 Stack now 0 @@ -139275,25 +164515,38 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 - | (1 + 1) / (1 - 1) -./calc.at:1426: $PREPARSER ./calc input stderr: -error: null divisor -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139303,7 +164556,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !+ ++ + | (# + 1) = 1111 +stderr: +./calc.at:1449: $PREPARSER ./calc input +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1451: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +stderr: +./calc.at:1451: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +1.2: syntax error: invalid character: '#' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139314,26 +164626,6 @@ }eg ' expout || exit 77 stderr: -error: null divisor -./calc.at:1441: cat stderr -./calc.at:1440: cat stderr -input: -input: - | error - | (1 + # + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token 
invalid token (1.1: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -139449,31 +164741,25 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... -./calc.at:1453: mv calc.y.tmp calc.y - -./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1457: cat stderr +1.2: syntax error: invalid character: '#' +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +stderr: + | (1 + #) = 1111 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1448: $PREPARSER ./calc input +1.1: syntax error +input: stderr: +stderr: + | (- *) + (1 2) = 1 +1.6: syntax error: invalid character: '#' +input: Starting parse Entering state 0 Stack now 0 @@ -139589,7 +164875,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139599,8 +164891,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 + | + | +1 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -139610,151 +164908,333 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: cat stderr -input: -./calc.at:1440: cat stderr - | 1 = 2 = 3 -./calc.at:1441: $PREPARSER ./calc input -522. 
calc.at:1426: input: - ok stderr: - | (1 + 1) / (1 - 1) -./calc.at:1440: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +stderr: +./calc.at:1453: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +input: +./calc.at:1451: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1443: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token +input: +1.6: syntax error: invalid character: '#' +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1448: cat stderr +stderr: +input: +input: + | (* *) + (*) + (*) +./calc.at:1457: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' + | (1 + 1) / (1 - 1) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1449: cat stderr +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: $PREPARSER ./calc /dev/null + | (# + 1) = 1111 +stderr: +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: +stderr: + | (#) + (#) = 2222 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1453: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 + | (1 + 1) / (1 - 1) +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1457: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1451: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1443: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1457: $PREPARSER ./calc input +input: +1.11-17: error: null divisor + | (!!) + (1 2) = 1 +./calc.at:1446: cat stderr +stderr: +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 22 @@ -139842,20 +165322,24 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1441: cat stderr +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1453: cat stderr +stderr: +input: +1.11: syntax error +1.1-16: error: 2222 != 1 + | (1 + # + 1) = 1111 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc input +input: +stderr: +stderr: +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -139997,145 +165481,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1441: $PREPARSER ./calc input -stderr: -./calc.at:1440: cat stderr -530. calc.at:1440: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1435: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1441: cat stderr -input: -./calc.at:1441: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1435: $PREPARSER ./calc input -539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... -stderr: -./calc.at:1454: mv calc.y.tmp calc.y - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1449: cat stderr +./calc.at:1457: $PREPARSER ./calc input +533. calc.at:1445: ok +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -140145,29 +165494,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 2 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: cat stderr stderr: -1.3: syntax error, unexpected number -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1441: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -140485,9 +165815,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (1 + #) = 1111 stderr: +./calc.at:1449: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -140805,13 +166139,57 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: cat stderr +538. 
calc.at:1453: ok +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... -./calc.at:1455: mv calc.y.tmp calc.y +1.6: syntax error: invalid character: '#' +./calc.at:1457: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1451: $PREPARSER ./calc input +stderr: - | 1//2 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1446: cat stderr +./calc.at:1448: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +input: +./calc.at:1443: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -140821,17 +166199,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: $PREPARSER ./calc input stderr: -./calc.at:1441: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 input: +stderr: + +syntax error: invalid character: '#' +syntax error: invalid character: '#' | (!!) 
+ (1 2) = 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1446: $PREPARSER ./calc input +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1449: cat stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -140841,8 +166232,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr stderr: +1.11-17: error: null divisor +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -140983,9 +166376,12 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +stderr: stderr: Starting parse Entering state 0 @@ -141127,10 +166523,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1457: cat stderr +1.2: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr stderr: -./calc.at:1441: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141140,12 +166539,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected invalid token -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1441: cat stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +input: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141156,11 +166563,42 @@ }eg ' expout || exit 77 input: +stderr: + | (* *) + (*) + (*) +./calc.at:1448: cat stderr +./calc.at:1446: cat stderr +./calc.at:1451: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' | (- *) + (1 2) = 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: cat stderr +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1449: cat stderr stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error input: +stderr: + | (1 + # + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -141309,11 +166747,16 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 = 2 = 3 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1457: cat stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +544. calc.at:1468: testing Calculator glr.cc ... +./calc.at:1468: mv calc.y.tmp calc.y + Starting parse Entering state 0 Stack now 0 @@ -141462,7 +166905,19 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: "$PERL" -pi -e 'use strict; +stderr: + | (# + 1) = 1111 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1451: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +535. calc.at:1448: ok +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141472,15 +166927,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error, unexpected '=' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1441: cat stderr -1.7: syntax error, unexpected '=' -input: - | (* *) + (*) + (*) -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +syntax error: invalid character: '#' +./calc.at:1451: $PREPARSER ./calc input +545. calc.at:1469: testing Calculator glr2.cc ... 
+./calc.at:1469: mv calc.y.tmp calc.y + +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141490,9 +166943,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1435: cat stderr +./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1446: cat stderr +./calc.at:1449: cat stderr +./calc.at:1457: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +stderr: +./calc.at:1446: $PREPARSER ./calc input +input: + +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (1 + # + 1) = 1111 +./calc.at:1457: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +./calc.at:1449: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -141644,7 +167114,19 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1451: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +1.11-17: error: null divisor stderr: Starting parse Entering state 0 @@ -141797,11 +167279,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | - | +1 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141811,14 +167291,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr stderr: -2.1: syntax error, unexpected '+' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: cat stderr -stderr: -2.1: syntax error, unexpected '+' +./calc.at:1446: cat stderr input: -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1449: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141828,10 +167317,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1457: $PREPARSER ./calc input +input: | 1 + 2 * 3 + !+ ++ -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: cat stderr +./calc.at:1446: $PREPARSER ./calc input +stderr: +error: null divisor +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr stderr: +error: null divisor +536. 
calc.at:1449: stderr: + ok +input: Starting parse Entering state 0 Stack now 0 @@ -141912,11 +167410,14 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: $PREPARSER ./calc /dev/null + | (#) + (#) = 2222 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of input Starting parse Entering state 0 Stack now 0 @@ -141997,14 +167498,20 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1457: cat stderr stderr: -1.1: syntax error, unexpected end of input + +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: | 1 + 2 * 3 + !- ++ -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; +541. calc.at:1457: ok +./calc.at:1446: $PREPARSER ./calc input +546. calc.at:1476: testing Calculator C++ %glr-parser ... +./calc.at:1476: mv calc.y.tmp calc.y + +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142014,8 +167521,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -./calc.at:1435: cat stderr Starting parse Entering state 0 Stack now 0 @@ -142096,10 +167603,9 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 Starting parse Entering state 0 Stack now 0 @@ -142180,14 +167686,12 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1435: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1441: "$PERL" -pi -e 'use strict; + +input: + | (1 + #) = 1111 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142197,32 +167701,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1441: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 +./calc.at:1446: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: | (#) + (#) = 2222 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: cat stderr -input: +./calc.at:1446: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: - | (!!) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -142346,11 +167835,17 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -142475,20 +167970,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1451: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142498,19 +167981,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: cat stderr -./calc.at:1435: cat stderr +547. calc.at:1476: testing Calculator glr2.cc ... +./calc.at:1446: cat stderr +./calc.at:1476: mv calc.y.tmp calc.y + +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: input: + | (# + 1) = 1111 +./calc.at:1451: $PREPARSER ./calc input | (1 + #) = 1111 -./calc.at:1441: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -142611,12 +168095,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... Starting parse Entering state 0 Stack now 0 @@ -142716,7 +168199,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142726,7 +168209,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1477: mv calc.y.tmp calc.y + +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142736,19 +168222,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: cat stderr -./calc.at:1435: cat stderr +./calc.at:1451: cat stderr +./calc.at:1446: cat stderr input: + | (1 + # + 1) = 1111 +./calc.at:1451: $PREPARSER ./calc input input: | (# + 1) = 1111 - | (* *) + (*) + (*) -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.6: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -142846,11 +168331,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.6: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -142948,7 +168431,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142958,8 +168441,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142969,17 +168451,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1451: cat stderr +./calc.at:1446: cat stderr input: -./calc.at:1441: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1435: $PREPARSER ./calc input -stderr: + | (1 + 1) / (1 - 1) +./calc.at:1451: $PREPARSER ./calc input input: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + # + 1) = 1111 -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: Starting parse Entering state 0 @@ -143096,10 +168579,20 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +1.11-17: error: null divisor +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: - | 1 + 2 * 3 + !- ++ +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -143215,22 +168708,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: $PREPARSER ./calc input -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1441: cat stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143240,13 +168718,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1451: cat stderr +./calc.at:1446: cat stderr input: | (1 + 1) / (1 - 1) -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -143389,11 +168865,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +537. calc.at:1451: ok +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -143536,10 +169009,518 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: cat stderr +./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +549. calc.at:1477: testing Calculator glr2.cc %locations ... +./calc.at:1477: mv calc.y.tmp calc.y + +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +534. calc.at:1446: ok + +./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +532. calc.at:1443: ok +./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + +stderr: +stdout: +./calc.at:1458: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... 
+./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | 1 2 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1458: cat stderr +input: + | 1//2 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1458: cat stderr +input: + | error +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1458: cat stderr +input: + | + | +1 +./calc.at:1458: $PREPARSER ./calc input +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: cat stderr +./calc.at:1458: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1458: cat stderr +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1458: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1458: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1458: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1458: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1458: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1458: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1458: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1458: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1458: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1458: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 
+1.11-17: error: null divisor +./calc.at:1458: cat stderr +542. calc.at:1458: ok + +552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1479: mv calc.y.tmp calc.y + +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +stdout: +./calc.at:1459: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 2 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1459: cat stderr +input: + | 1//2 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: cat stderr +input: + | error +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1459: cat stderr +input: + | + | +1 +./calc.at:1459: $PREPARSER ./calc input +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: cat stderr +./calc.at:1459: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1459: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token 
['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1459: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1459: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1459: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1459: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1459: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1459: cat stderr +stderr: +stdout: +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | (1 + #) = 1111 +./calc.at:1459: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +stderr: +input: +1.6: syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1476: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1478: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: cat stderr +stderr: +stderr: +input: +input: + | 1 2 +./calc.at:1478: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1459: $PREPARSER ./calc input + | 1 2 +stderr: +1.3: syntax error +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +syntax error +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +syntax error +./calc.at:1468: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143549,7 +169530,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: "$PERL" -pi -e 'use strict; +input: +./calc.at:1459: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143559,26 +169556,986 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1478: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1476: cat stderr +stderr: +1.6: syntax error: invalid character: '#' + | 1//2 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1//2 +input: +./calc.at:1476: $PREPARSER ./calc input +1.3: syntax error +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' + | 1 2 +./calc.at:1468: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +1.3: syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error stderr: stdout: -./calc.at:1441: cat stderr -./types.at:139: $PREPARSER ./test +./calc.at:1459: cat stderr +./calc.at:1477: "$PERL" -ne ' 
+ chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +syntax error +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + 1) / (1 - 1) +./calc.at:1459: $PREPARSER ./calc input +input: stderr: -./calc.at:1435: cat stderr -531. calc.at:1441: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +stderr: +input: +./calc.at:1476: cat stderr + | error +./calc.at:1459: cat stderr +./calc.at:1478: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +input: +./calc.at:1468: cat stderr +stderr: + | error +./calc.at:1476: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +543. calc.at:1459: ok +stderr: +1.1: syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1//2 +1.3: syntax error +./calc.at:1468: $PREPARSER ./calc input +stderr: +syntax error +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1478: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +input: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1476: cat stderr +input: +stderr: + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +1.7: syntax error +input: + | 1 = 2 = 3 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1479: mv calc.y.tmp calc.y + +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr +stderr: +stderr: +1.7: syntax error +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +stderr: +input: + | error +./calc.at:1468: $PREPARSER ./calc input +syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1477: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +input: + | error +./calc.at:1477: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | + | +1 +./calc.at:1476: $PREPARSER ./calc input +stderr: +2.1: syntax error +1.1: syntax error +./calc.at:1468: cat stderr +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +stderr: +./calc.at:1468: $PREPARSER ./calc input +syntax error +stderr: +./calc.at:1478: cat stderr +./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +./calc.at:1478: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | 1 = 2 = 3 +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: +1.1: syntax error +stderr: +1.7: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: $PREPARSER ./calc /dev/null +1.7: syntax error +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +./calc.at:1468: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1477: cat stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: + | + | +1 +./calc.at:1476: cat stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1478: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1468: cat stderr +./calc.at:1477: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: $PREPARSER ./calc /dev/null +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1476: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +stderr: +1.1: syntax error +syntax error +input: + | (!!) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +error: 2222 != 1 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 +./calc.at:1478: cat stderr +./calc.at:1477: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: $PREPARSER ./calc input +input: +input: + | (- *) + (1 2) = 1 +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1478: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +input: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 + | (- *) + (1 2) = 1 +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +syntax error +syntax error +error: 2222 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (!!) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1476: cat stderr +input: +./calc.at:1468: cat stderr + | (* *) + (*) + (*) +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: +input: + | (* *) + (*) + (*) +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +syntax error +syntax error +syntax error +./calc.at:1477: cat stderr +stderr: +syntax error +error: 2222 != 1 +input: + | (- *) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +input: +./calc.at:1476: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1468: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: $PREPARSER ./calc input +stderr: +stderr: +input: +./calc.at:1477: cat stderr + | (- *) + (1 2) = 1 +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +input: +stderr: +input: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1477: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: cat stderr +input: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +stderr: +syntax error +syntax error +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +stderr: +stderr: +./calc.at:1476: $PREPARSER ./calc input +syntax error +syntax error +syntax error +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr | (1 + #) = 1111 -======== Testing with C++ standard flags: '' -./calc.at:1435: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1478: $PREPARSER ./calc input stderr: +input: 1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - + | 1 + 2 * 3 + !+ ++ +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr stderr: +stderr: +./calc.at:1477: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.6: syntax error: invalid character: '#' -./calc.at:1435: "$PERL" -pi -e 'use strict; +input: +stderr: + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143588,16 +170545,70 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +input: +input: + | (#) + (#) = 2222 +stderr: +./calc.at:1477: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +syntax error: invalid character: '#' +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: | (# + 1) = 1111 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 1.2: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1468: cat stderr +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143607,23 +170618,116 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +input: + | (#) + (#) = 2222 +input: +./calc.at:1468: $PREPARSER ./calc input +input: + | (# + 1) = 1111 + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' input: | (1 + # + 1) = 1111 -541. calc.at:1457: testing Calculator C++ parse.error=custom ... -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1457: mv calc.y.tmp calc.y - +./calc.at:1478: $PREPARSER ./calc input stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: 1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1468: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +./calc.at:1477: cat stderr +input: +input: + | (1 + # + 1) = 1111 + | (1 + #) = 1111 +input: +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input stderr: -stdout: -./types.at:139: ./check -./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1478: cat stderr +syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +stderr: +1.2: syntax error: invalid character: '#' + | (1 + 1) / (1 - 1) +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143633,17 +170737,117 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1435: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr +./calc.at:1476: cat stderr +./calc.at:1477: cat stderr +input: input: +input: + | (1 + # + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: cat stderr | (1 + 1) / (1 - 1) -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +error: null divisor +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +550. 
calc.at:1478: ok +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +error: null divisor +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1468: cat stderr +./calc.at:1476: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1477: $PREPARSER ./calc input stderr: +input: 1.11-17: error: null divisor -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +546. calc.at:1476: ok + | (1 + # + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input stderr: 1.11-17: error: null divisor -./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143653,31 +170857,409 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr -527. calc.at:1435: ok -./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +syntax error: invalid character: '#' + +./calc.at:1477: cat stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +548. calc.at:1477: ok +./calc.at:1468: cat stderr +554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... +./calc.at:1480: mv calc.y.tmp calc.y + +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | (1 + 1) / (1 - 1) +./calc.at:1468: $PREPARSER ./calc input +stderr: +error: null divisor +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +555. 
calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... +./calc.at:1480: mv calc.y.tmp calc.y + +./calc.at:1468: cat stderr +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +544. calc.at:1468: ok + +556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... +./calc.at:1482: mv calc.y.tmp calc.y + +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +557. calc.at:1482: testing Calculator glr2.cc %debug ... +./calc.at:1482: mv calc.y.tmp calc.y + +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./calc.at:1469: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1469: $PREPARSER ./calc input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1458: mv calc.y.tmp calc.y +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc -./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1476: $PREPARSER ./calc input +input: +stderr: + | 1 2 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +input: + | 1 2 +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1469: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1//2 +./calc.at:1469: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1476: cat stderr +syntax error +input: + | 1//2 +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: cat stderr +input: + | error +./calc.at:1469: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1476: cat stderr +input: + | error +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +./calc.at:1469: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: cat stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 = 2 = 3 +syntax error +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1469: $PREPARSER ./calc input +stderr: +./calc.at:1476: cat stderr +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +input: + | + | +1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: cat stderr +stderr: +./calc.at:1469: $PREPARSER ./calc /dev/null stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1477: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc /dev/null +stderr: +input: + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: cat stderr +stderr: +stderr: +1.3: syntax error +syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1469: $PREPARSER ./calc input stderr: stdout: -./calc.at:1445: "$PERL" -ne ' +input: +./calc.at:1478: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -143688,6 +171270,9 @@ || /\t/ )' calc.cc + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +stderr: input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -143702,2092 +171287,2636 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +stderr: +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +1.3: syntax error +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 2 +./calc.at:1478: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +./calc.at:1477: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | error +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +input: +input: +./calc.at:1478: cat stderr + | (!!) + (1 2) = 1 + | (!!) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1469: $PREPARSER ./calc input +stderr: +1.1: syntax error +stderr: +input: +syntax error +error: 2222 != 1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +error: 2222 != 1 + | 1//2 +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +input: + | 1 = 2 = 3 +./calc.at:1477: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.7: syntax error +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: cat stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +input: +./calc.at:1478: cat stderr +1.7: syntax error + | (- *) + (1 2) = 1 +./calc.at:1469: $PREPARSER ./calc input +input: +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error +./calc.at:1476: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1478: cat stderr + | (* *) + (*) + (*) +input: +./calc.at:1476: $PREPARSER ./calc input + | + | +1 +stderr: +./calc.at:1477: $PREPARSER ./calc input +syntax error +syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +stderr: +stderr: +2.1: syntax error +1.7: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +stderr: +2.1: syntax error +./calc.at:1469: $PREPARSER ./calc input +stderr: +1.7: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +syntax error +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +stderr: +./calc.at:1477: $PREPARSER ./calc /dev/null +syntax error +syntax error +syntax error +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +./calc.at:1478: cat stderr +stderr: +1.1: syntax error +input: +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: $PREPARSER ./calc input +stdout: +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + + | + | +1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input +2.1: syntax error +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: +./calc.at:1478: cat stderr +./calc.at:1478: $PREPARSER ./calc /dev/null +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1477: cat stderr +./calc.at:1469: cat stderr +stderr: +stderr: +syntax error, unexpected number +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1476: $PREPARSER ./calc input +stderr: +1.1: syntax error + | 1 + 2 * 3 + !+ ++ +./calc.at:1469: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1469: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1478: cat stderr +./calc.at:1477: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error, unexpected number +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (!!) 
+ (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1469: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: +1.11: syntax error +1.1-16: error: 2222 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: + | (#) + (#) = 2222 +./calc.at:1476: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: cat stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: + | 1//2 +./calc.at:1479: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1477: cat stderr +stderr: +./calc.at:1478: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (!!) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1469: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +input: + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1479: cat stderr +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input +input: +stderr: + | error +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1478: cat stderr +syntax error: invalid character: '#' +syntax error, unexpected invalid token +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +input: +syntax error, unexpected invalid token + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1469: cat stderr +stderr: +stdout: +./calc.at:1477: cat stderr +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1469: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1479: cat stderr +./calc.at:1476: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: +input: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +stderr: +stderr: +input: +input: + | 1 + 2 * 3 + !- ++ + | 1 = 2 = 3 +./calc.at:1479: $PREPARSER ./calc input +stderr: +./calc.at:1477: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '=' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1478: cat stderr +stderr: +stderr: +stdout: +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +input: + | 1 2 +./calc.at:1480: $PREPARSER ./calc input +stderr: +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 
by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 Next token is token '=' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) -> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) -> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by 
rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token Next 
token is token ')' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp 
(1) -> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '-' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (3) -Shifting token number (3) 
+Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) -> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) -> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm 
exp (3) -> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () -> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by 
rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) -> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) -> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () -> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = 
nterm exp (4) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) -> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 
Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 Next token is token '=' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) -> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 6 8 18 2 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) -> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): 
$1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token Next token is token ')' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = 
nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Next token is token '=' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing 
stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '-' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) -> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) -> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' () -Reducing stack by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) -> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by 
rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) -> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () -> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2) -Shifting token number (2) 
+Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) -> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) -> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 
1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () -> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token Next token is token '^' () Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 -Stack now 0 6 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) -> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: | 1 2 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: stderr: +syntax error, unexpected number Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token number (2) -Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token "number" (2) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (* *) + (*) + (*) +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token number (2) -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token "number" (2) +stderr: +./calc.at:1480: cat stderr +stderr: +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '=' +./calc.at:1476: cat stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +input: + | 1//2 +input: +./calc.at:1480: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145797,75 +173926,182 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1479: cat stderr +syntax error: invalid character: '#' +./calc.at:1478: cat stderr +./calc.at:1482: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1477: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1480: cat stderr + | (#) + (#) = 2222 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: cat stderr + | + | +1 +./calc.at:1479: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !+ ++ input: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '+' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1//2 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +input: +input: + | error Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 -Stack now 0 8 22 Reading a token Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error Error: popping token '/' () -Stack now 0 8 Error: popping nterm exp (1) -Stack now 0 Cleanup: discarding lookahead token '/' () -Stack now 0 -stderr: -stdout: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1469: $PREPARSER ./calc input +syntax error, unexpected '+' +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 -Stack now 0 8 22 Reading a token Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+syntax error Error: popping token '/' () -Stack now 0 8 Error: popping nterm exp (1) -Stack now 0 Cleanup: discarding lookahead token '/' () -Stack now 0 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1445: "$PERL" -pi -e 'use strict; +stderr: +input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1476: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +stderr: +syntax error, unexpected invalid token +stderr: +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145875,37 +174111,165 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +error: null divisor +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | error -./calc.at:1445: $PREPARSER ./calc input stderr: -stdout: +./calc.at:1479: cat stderr stderr: -./types.at:139: $PREPARSER ./test +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +error: null divisor +./calc.at:1479: $PREPARSER ./calc /dev/null +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +stderr: +syntax error, unexpected end of input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +./calc.at:1482: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected end of input +./calc.at:1469: cat stderr +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1476: cat stderr +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1480: cat stderr +./calc.at:1478: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1479: cat stderr +./calc.at:1477: cat stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +547. calc.at:1476: ok +input: +stderr: +error: null divisor stderr: +input: + | (#) + (#) = 2222 +./calc.at:1478: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1445: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () + | 1 = 2 = 3 +stderr: +./calc.at:1480: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145915,93 +174279,250 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +stderr: +syntax error, unexpected '=' +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: cat stderr +syntax error, unexpected '=' +./calc.at:1478: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input +stderr: +input: + | (1 + #) = 1111 +./calc.at:1478: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +stderr: +1.6: syntax error: invalid character: '#' + +./calc.at:1482: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +input: +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input input: | 1 = 2 = 3 -./calc.at:1445: $PREPARSER ./calc input stderr: +./calc.at:1482: $PREPARSER ./calc input +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '=' () -syntax error, unexpected '=' +syntax error Error: popping nterm exp (2) -Stack now 0 8 18 Error: popping token '=' () -Stack now 0 8 Error: popping nterm exp (1) -Stack now 0 Cleanup: discarding lookahead token '=' () -Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1469: cat stderr +stderr: stderr: +input: +syntax error, unexpected number +error: 2222 != 1 +stderr: +syntax error, unexpected '+' + | (1 + # + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '=' () -syntax error, unexpected '=' +syntax error Error: popping nterm exp (2) -Stack now 0 8 18 Error: popping token '=' () -Stack now 0 8 Error: popping nterm exp (1) -Stack now 0 Cleanup: discarding lookahead token '=' () -Stack now 0 +./calc.at:1478: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +545. calc.at:1469: stderr: + ok +syntax error, unexpected '+' +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1479: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +./calc.at:1480: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: cat stderr +stderr: +./calc.at:1480: $PREPARSER ./calc /dev/null +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -146011,68 +174532,119 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1445: cat stderr input: +syntax error, unexpected end of input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 | | +1 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1477: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '+' () -syntax error, unexpected '+' +syntax error Error: popping nterm input () -Stack now 0 Cleanup: discarding lookahead token '+' () -Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + 1) / (1 - 1) +input: stderr: +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1479: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token Next token is token '+' () -syntax error, unexpected '+' +syntax error Error: popping nterm input () -Stack now 0 Cleanup: discarding lookahead token '+' () -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +stderr: +syntax error, unexpected end of input +./calc.at:1478: $PREPARSER ./calc input +1.11-17: error: null divisor +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +1.11-17: error: null divisor +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -146082,33 +174654,87 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1480: cat stderr stderr: -stdout: -./calc.at:1445: cat stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1445: $PREPARSER ./calc /dev/null +1.6: syntax error: invalid character: '#' +./calc.at:1482: cat stderr +./calc.at:1477: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: $PREPARSER ./calc /dev/null +./calc.at:1478: cat stderr +stderr: +./calc.at:1479: cat stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +549. calc.at:1477: ok +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + 1) / (1 - 1) +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 + +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.11-17: error: null divisor stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1445: "$PERL" -pi -e 'use strict; +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -146118,649 +174744,607 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... +558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... +./calc.at:1486: mv calc.y.tmp calc.y + +559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... +./calc.at:1485: mv calc.y.tmp calc.y + +./calc.at:1485: mv calc.y.tmp calc.y + +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./calc.at:1480: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | (!!) 
+ (1 2) = 1 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1478: cat stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: cat stderr +stderr: +syntax error, unexpected number +error: 2222 != 1 input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +551. calc.at:1478: ok +syntax error, unexpected number +error: 2222 != 1 stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 -Stack now 0 4 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (3) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '+' () Shifting token '+' () Entering 
state 20 -Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Error: popping token '+' () -Stack now 0 8 20 4 12 Error: popping nterm exp (3) -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = 
nterm exp (1) $2 = token '*' () $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Error: popping token '*' () -Stack now 0 8 20 4 12 Error: popping nterm exp (2) -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) error: 4444 != 1 -> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 -Stack now 0 4 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (3) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error Error: popping token '+' () -Stack now 0 8 20 4 12 Error: popping nterm exp (3) -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Next token is token '*' () 
Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Error: popping token '*' () -Stack now 0 8 20 4 12 Error: popping nterm exp (2) -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) error: 4444 != 1 -> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +input: +./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: + | (1 + #) = 1111 +./calc.at:1479: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -146770,322 +175354,284 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1482: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1479: cat stderr input: + | (# + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input | (!!) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1443: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1445: $PREPARSER ./calc input -input: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 4 5 Reading a token Next token is token '!' () Shifting token '!' () Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): +Reducing stack 0 by rule 16 (line 107): $1 = token '!' () $2 = token '!' 
() -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +stderr: +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 4 5 Reading a token Next token is token '!' () Shifting token '!' () Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): +Reducing stack 0 by rule 16 (line 107): $1 = token '!' () $2 = token '!' () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = 
nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1445: "$PERL" -pi -e 'use strict; +input: +./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147095,2757 +175641,600 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... +./calc.at:1479: cat stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1486: mv calc.y.tmp calc.y + +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | (1 + # + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error: invalid character: '#' +./calc.at:1482: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr input: +stderr: +syntax error: invalid character: '#' | (- *) + (1 2) = 1 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1480: $PREPARSER ./calc input +stderr: stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token 
number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 
-Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp 
(5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 
-Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack 
now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) 
-Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) 
-Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 
-Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 -Stack now 0 4 2 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): +Reducing stack 0 by rule 15 (line 106): $1 = token '-' () $2 = token error () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () -Stack now 0 8 20 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 
Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +stderr: +input: + | (1 + 1) / (1 - 1) +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +error: null divisor +./calc.at:1480: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +error: null divisor +input: +./calc.at:1482: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' () 
+Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +552. 
calc.at:1479: ok +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack 
now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 
1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token 
-Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm 
exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm 
line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) 
-Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 2 -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: - | (* *) + (*) + (*) -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1443: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 -Stack now 0 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 +syntax error Shifting token error () Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 Next token is token '*' () Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '\n' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149855,35 +176244,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1445: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; + +./calc.at:1480: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149893,424 +176259,308 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: cat stderr stderr: -stdout: -./types.at:139: $PREPARSER ./test +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr stderr: +syntax error: invalid character: '#' input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 8 20 5 Reading a token Next token is token '+' () Shifting token '+' () Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Reducing stack 0 by rule 17 (line 108): $1 = token '!' () $2 = token '+' () -Stack now 0 8 20 Cleanup: popping token '+' () Cleanup: popping nterm exp (7) - | error +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... +./calc.at:1487: mv calc.y.tmp calc.y + +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 stderr: +./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error: invalid character: '#' Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 8 20 5 Reading a token Next token is token '+' () Shifting token '+' () Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Reducing stack 0 by rule 17 (line 108): $1 = token '!' 
() $2 = token '+' () -Stack now 0 8 20 Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 input: | 1 + 2 * 3 + !- ++ -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 8 20 5 Reading a token Next token is token '-' () Shifting token '-' () Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () $2 = token '-' () -Stack now 0 8 20 Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1443: cat stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -442. 
types.at:139: ok +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' () -Reducing stack by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 Next token is token '+' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 -Stack now 0 8 20 5 Reading a token Next token is token '-' () Shifting token '-' () Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): +Reducing stack 0 by rule 18 (line 109): $1 = token '!' 
() $2 = token '-' () -Stack now 0 8 20 Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -input: - | 1 = 2 = 3 -./calc.at:1443: $PREPARSER ./calc input -stderr: - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150320,52 +176570,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1445: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: - | (#) + (#) = 2222 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150375,288 +176581,217 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (1 + # + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: 
discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) -> $$ = nterm exp (2222) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1443: cat stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +syntax error: invalid character: '#' +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) -> $$ = nterm exp (2222) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): 
+Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | - | +1 -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150666,35 +176801,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1482: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1445: cat stderr +stdout: +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + input: -./calc.at:1443: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150704,235 +176829,187 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1445: $PREPARSER ./calc input -543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... 
-./calc.at:1459: mv calc.y.tmp calc.y - -stderr: -./calc.at:1443: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token syntax error: invalid character: '#' Next token is token error () Error: popping token '+' () -Stack now 0 4 12 Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1443: $PREPARSER ./calc /dev/null Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token syntax error: invalid character: '#' Next token is token error () Error: popping token '+' () -Stack now 0 4 12 Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1480: cat stderr +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1480: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150942,8 +177019,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +stderr: +input: + | 1 2 +./calc.at:1480: $PREPARSER ./calc input +stderr: +error: null divisor +stderr: +./calc.at:1482: cat stderr +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error, unexpected number + | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150953,846 +177044,167 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 
-Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1480: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +554. 
calc.at:1480: ok Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token syntax error: invalid character: '#' Next token is token error () Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1480: cat stderr +input: + | 1//2 +./calc.at:1480: $PREPARSER ./calc input + stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: "$PERL" -pi -e 'use strict; +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151802,8 +177214,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1482: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151815,388 +177230,190 @@ ' expout || exit 77 input: | (1 + # + 1) = 1111 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: cat stderr +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: cat stderr stderr: +input: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token syntax error: invalid character: '#' Next token is token error () Error: popping token '+' () -Stack now 0 4 12 Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1480: $PREPARSER ./calc input +stderr: +563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +./calc.at:1489: mv calc.y.tmp calc.y + +syntax error, unexpected invalid token stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token syntax error: invalid character: '#' Next token is token error () Error: popping token '+' () -Stack now 0 4 12 Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token Next token is token '\n' () -Reducing stack by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) -> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () 
Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - 
$1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +syntax error, unexpected invalid token +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152206,149 +177423,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack 
now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1445: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152358,598 +177433,254 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1482: cat stderr +./calc.at:1480: cat stderr input: -./calc.at:1443: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1445: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1480: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input +stderr: +syntax error, unexpected '=' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: + | (1 + 1) / (1 - 1) +syntax error, unexpected '=' +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 4 12 20 29 Reading a token Next token is token ')' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): 
+Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () -> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 -Stack now 0 8 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 22 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 28 -Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) -> $$ = nterm exp (0) Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () -> $$ = nterm exp (0) Entering state 31 -Stack now 0 8 22 31 Reading a token Next token is token '\n' () -Reducing stack by rule 10 (line 93): +Reducing stack 0 by rule 10 (line 93): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) error: null divisor -> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1482: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 -Stack now 0 4 12 20 29 Reading a token Next token is token ')' () -Reducing stack by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Stack now 0 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () -> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 -Stack now 0 8 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 22 4 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 -Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 28 -Stack now 0 8 22 4 12 19 28 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) -> $$ = nterm exp (0) Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () -> $$ = nterm exp (0) Entering state 31 -Stack now 0 8 22 31 Reading a token Next token is token '\n' () -Reducing stack by rule 10 (line 93): +Reducing stack 0 by rule 10 (line 93): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) error: null divisor -> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 75): $1 = nterm 
exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 
1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152959,7 +177690,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152969,937 +177701,885 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr -./calc.at:1445: cat stderr -533. calc.at:1445: input: - ok - | (* *) + (*) + (*) -./calc.at:1443: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +./calc.at:1482: cat stderr + | + | +1 +./calc.at:1480: $PREPARSER ./calc input +stderr: +556. calc.at:1482: ok +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input +syntax error, unexpected '+' + +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '+' stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by 
rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -Starting parse -Entering state 0 -Stack now 0 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp 
(1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1443: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token 
number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a 
token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -544. calc.at:1468: testing Calculator glr.cc ... -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: mv calc.y.tmp calc.y - -./calc.at:1443: cat stderr -./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | (#) + (#) = 2222 -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - 
$2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering 
state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' 
() + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -153909,1245 +178589,891 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1443: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 
(line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./calc.at:1454: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 
1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stdout: -stderr: -./types.at:139: ./check -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./calc.at:1443: cat stderr -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (# + 1) = 1111 -./calc.at:1443: $PREPARSER ./calc input -input: - | 1 2 -./calc.at:1454: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping 
token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.3: syntax error, unexpected number -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1453: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -1.3: syntax error, unexpected number -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () 
+Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: cat stderr -./calc.at:1453: $PREPARSER ./calc input -input: -./calc.at:1443: cat stderr -stderr: - | 1//2 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + # + 1) = 1111 -stderr: -./calc.at:1443: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: - | 1 2 -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp 
(1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error, unexpected number -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 
79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: 
popping nterm input (1.1-2.0: ) -input: -1.3: syntax error, unexpected number - | error -./calc.at:1454: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error, unexpected invalid token -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1453: cat stderr -1.1: syntax error, unexpected invalid token -./calc.at:1443: cat stderr -input: - | 1//2 -input: -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1443: $PREPARSER ./calc input -stderr: -./calc.at:1454: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp 
(2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp 
(1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 
(line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.7: syntax error, unexpected '=' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1443: cat stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: - | error -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -532. 
calc.at:1443: ok -1.1: syntax error, unexpected invalid token -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr + | 1 2 +./calc.at:1482: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected invalid token -input: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +./calc.at:1489: mv calc.y.tmp calc.y -./calc.at:1454: $PREPARSER ./calc input -stderr: -stdout: -stderr: -2.1: syntax error, unexpected '+' -./types.at:139: $PREPARSER ./test -./calc.at:1453: cat stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1453: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' -======== Testing with C++ standard flags: '' -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1454: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: $PREPARSER ./calc /dev/null -./calc.at:1453: cat stderr -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: $PREPARSER ./calc /dev/null stderr: -./calc.at:1453: $PREPARSER ./calc input -1.1: syntax error, unexpected end of file +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) stderr: -2.1: syntax error, unexpected '+' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error, unexpected '+' -545. calc.at:1469: testing Calculator glr2.cc ... -./calc.at:1454: "$PERL" -pi -e 'use strict; +syntax error, unexpected end of input +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155157,11 +179483,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: mv calc.y.tmp calc.y - -./calc.at:1454: cat stderr -./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155171,78 +179493,72 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1482: cat stderr input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1453: cat stderr -./calc.at:1454: $PREPARSER ./calc input -stderr: -./calc.at:1453: $PREPARSER ./calc /dev/null -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected end of input -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -1.1: syntax error, unexpected end of input -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: cat stderr -./calc.at:1454: cat stderr input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1453: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input + | 1//2 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: - | (!!) + (1 2) = 1 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $PREPARSER ./calc input stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1453: "$PERL" -pi -e 'use strict; +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155252,11 +179568,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./types.at:139: ./check -./calc.at:1453: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155266,35 +179578,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1454: cat stderr +./calc.at:1480: cat stderr +./calc.at:1482: cat stderr input: | (!!) + (1 2) = 1 -./calc.at:1453: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1454: $PREPARSER ./calc input -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +./calc.at:1480: $PREPARSER ./calc input + | error +./calc.at:1482: $PREPARSER ./calc input stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stdout: -./types.at:139: $PREPARSER ./test +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155304,8 +179618,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155315,79 +179629,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1453: cat stderr -./calc.at:1454: cat stderr -input: input: | (- *) + (1 2) = 1 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | (* *) + (*) + (*) -./calc.at:1454: $PREPARSER ./calc input -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -443. types.at:139: ok -./calc.at:1453: cat stderr -./calc.at:1454: cat stderr -input: -input: - | (* *) + (*) + (*) -./calc.at:1453: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1454: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: cat stderr stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 input: -./calc.at:1453: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155397,37 +179655,73 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1453: cat stderr -./calc.at:1454: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr stderr: -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () input: - | 1 + 2 * 3 + !- ++ -./calc.at:1453: $PREPARSER ./calc input -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155437,43 +179731,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -546. calc.at:1476: testing Calculator C++ %glr-parser ... 
-./calc.at:1476: mv calc.y.tmp calc.y - - | (#) + (#) = 2222 -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1453: cat stderr -./calc.at:1454: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1482: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1453: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1453: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155483,59 +179754,58 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1454: cat stderr stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1480: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1454: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1480: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1454: cat stderr - | (# + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' +stdout: input: - | (# + 1) = 1111 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155545,11 +179815,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.2: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: "$PERL" -ne ' + | 1 + 2 * 3 + !- ++ +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1479: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -155558,29 +179827,12 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc + )' calc.cc calc.hh +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1453: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1454: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -155594,16 +179846,26 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1457: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc /dev/null stderr: -1.6: syntax error: invalid character: '#' stderr: -input: - | (1 + # + 1) = 1111 -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -155613,84 +179875,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1480: cat stderr stderr: -./calc.at:1453: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1453: $PREPARSER ./calc input +stdout: input: | 1 2 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1454: cat stderr -1.11-17: error: null divisor -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -stderr: -input: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | (1 + 1) / (1 - 1) -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: $PREPARSER ./calc input -stderr: -./calc.at:1453: cat stderr -1.11-17: error: null divisor -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -stderr: -1.11-17: error: null divisor -538. calc.at:1453: ok -input: - | 1//2 -./calc.at:1457: $PREPARSER ./calc input -stderr: - -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stdout: -./calc.at:1454: cat stderr -./calc.at:1458: "$PERL" -ne ' +./calc.at:1485: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -155702,44 +179893,24 @@ )' calc.cc input: -539. 
calc.at:1454: ok - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1458: $PREPARSER ./calc input -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -stderr: -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr - -input: - | error -input: -./calc.at:1457: $PREPARSER ./calc input - | 1 2 -./calc.at:1458: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: $PREPARSER ./calc input stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stdout: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input +input: stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1448: "$PERL" -ne ' +./calc.at:1486: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -155748,14 +179919,11 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc calc.hh + )' calc.cc stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -547. calc.at:1476: testing Calculator glr2.cc ... -input: -./calc.at:1476: mv calc.y.tmp calc.y - +syntax error: invalid character: '#' +syntax error: invalid character: '#' | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -155769,50 +179937,15 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1458: cat stderr -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1457: cat stderr -stderr: -input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -input: - | 1 = 2 = 3 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1458: $PREPARSER ./calc input -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./calc.at:1455: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -input: -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | 1 2 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error, unexpected number +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: cat stderr stderr: -1.3: syntax error -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -155826,4352 +179959,2779 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1477: mv calc.y.tmp calc.y - -stderr: -1.3: syntax error -./calc.at:1458: cat stderr -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1457: cat stderr -input: - | error -./calc.at:1458: $PREPARSER ./calc input -input: - | - | +1 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1448: cat stderr -./calc.at:1458: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering 
state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting 
token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 
= token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) 
-Reducing stack by rule 11 (line 115): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): - $1 = token '-' 
(5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () 
+-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) -LAC: initial 
context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp 
(-1) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number 
(10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 18 1 
-Reducing stack by rule 5 (line 92): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.5: 3) --> $$ = 
nterm exp (12.5: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 116): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) 
+Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm 
exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1457: cat stderr -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -input: -./calc.at:1448: $PREPARSER ./calc input - | 1 = 2 = 3 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1457: $PREPARSER ./calc /dev/null -./calc.at:1458: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected number +stderr: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): - $1 = nterm 
exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading 
a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () 
+Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 115): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 
0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + 
$2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = 
nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp 
(-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a 
token -Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 8 
19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = 
nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 -Next token is token 
'=' (12.7: ) -Reducing stack by rule 12 (line 116): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by 
rule 12 (line 116): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input 
(1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -1.3: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: | 1 2 -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.3: syntax error -./calc.at:1455: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1457: cat stderr -input: -./calc.at:1458: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1448: cat stderr -stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -input: -input: - | - | +1 -stderr: -./calc.at:1458: $PREPARSER ./calc input - | error -./calc.at:1448: $PREPARSER ./calc input -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -input: -stderr: -stderr: - | 1//2 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1455: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = 
token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1457: cat stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 - | (!!) + (1 2) = 1 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1458: cat stderr -stderr: -./calc.at:1448: cat stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: $PREPARSER ./calc /dev/null -./calc.at:1455: cat stderr -input: -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 = 2 = 3 -./calc.at:1448: $PREPARSER ./calc input -input: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -./calc.at:1455: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: cat stderr -stderr: -stderr: -1.7: syntax error -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 - | (- *) + (1 2) = 1 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input -input: -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1458: $PREPARSER ./calc input -stderr: -./calc.at:1448: cat stderr -./calc.at:1455: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: - | - | +1 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./calc.at:1455: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1457: cat stderr -2.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) 
Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error -./calc.at:1458: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1457: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '-' () +Shifting token 
'-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 8 18 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -input: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | (!!) + (1 2) = 1 -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1448: cat stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -stderr: -./calc.at:1448: $PREPARSER ./calc /dev/null -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -./calc.at:1457: cat stderr -1.1: syntax error -input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | - | +1 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1458: cat stderr -stdout: -input: -stderr: -./calc.at:1449: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -stderr: -1.1: syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1457: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' 
-LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -input: -./calc.at:1458: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1449: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1455: cat stderr -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1455: $PREPARSER ./calc /dev/null -input: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1448: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 - | 1 2 -./calc.at:1455: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1458: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -1.3: syntax error -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1457: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr - | (#) + (#) = 2222 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1448: cat stderr -input: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1449: cat stderr -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: - | (!!) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input - | 1//2 -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1458: cat stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1457: cat stderr -./types.at:139: $PREPARSER ./test -stderr: +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 2stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) +Next token is token ')' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Stack now 0 
8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.7-18: ) +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering 
state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token 
'=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -1.3: syntax error -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1458: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +4 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input 
() +Entering state 6 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token 
is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 Reading a token -Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering 
state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1448: cat stderr -stderr: -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1455: cat stderr -syntax error: invalid character: '#' -./calc.at:1449: cat stderr -input: -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input - | (!!) + (1 2) = 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input -stderr: -input: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -./calc.at:1457: cat stderr -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = 
nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -stderr: -1.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: $PREPARSER ./calc input -445. types.at:139: ok -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 8 20 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' () 
+Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -1.1: syntax error -./calc.at:1458: cat stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1448: cat stderr -syntax error: invalid character: '#' -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -160181,41 +182741,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: -./calc.at:1455: cat stderr - | (* *) + (*) + (*) -./calc.at:1448: $PREPARSER ./calc input - | (#) + (#) = 2222 - -./calc.at:1458: $PREPARSER ./calc input -stderr: -./calc.at:1449: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1457: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1455: $PREPARSER ./calc input -stderr: -input: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error stderr: -input: - | (1 + # + 1) = 1111 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -160225,3202 +182752,1353 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input -stderr: -stderr: -1.7: syntax error Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: 
initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr -stderr: -stderr: -stderr: -syntax error: invalid character: '#' +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) +Next token is token ')' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 
S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error -./calc.at:1448: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1458: $PREPARSER ./calc input -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1457: cat stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1455: cat stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1449: cat stderr -1.6: syntax error: invalid character: '#' -stdout: -stderr: -./calc.at:1446: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -input: -input: -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1455: $PREPARSER ./calc input -input: - | - | +1 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1458: cat stderr -stderr: -input: -input: -error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1446: $PREPARSER ./calc input -2.1: syntax error -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking 
lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -stderr: -./calc.at:1458: $PREPARSER ./calc input -2.1: syntax error -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking 
lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -549. calc.at:1477: testing Calculator glr2.cc %locations ... -./calc.at:1477: mv calc.y.tmp calc.y - -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -541. calc.at:1457: ok -./calc.at:1458: cat stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1455: cat stderr -./calc.at:1449: $PREPARSER ./calc /dev/null -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 30 -Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' () 
+Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 10 -Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) 
-Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) +Reducing stack 0 by rule 3 (line 74): + 
$1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 
= nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number 
(5.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) +Next token is token '=' () +Reducing stack 0 by rule 11 
(line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Reducing 
stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 -Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) Entering state 10 -Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token 
'\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 -Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' () +Reducing stack 0 
by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' 
(12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm 
exp (2) Entering state 12 -Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 32 -Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 -Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 -Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) Entering state 32 -Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 -Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is 
token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1480: cat stderr +input: +./calc.at:1479: cat stderr + | 1 2 +./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1455: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -1.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1479: $PREPARSER ./calc input +input: +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+ | (1 + #) = 1111 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1485: cat stderr +./calc.at:1482: cat stderr stderr: +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: -1.1: syntax error input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +syntax error: invalid character: '#' + | (!!) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input + | 1//2 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack 
now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' 
(2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 
10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 
0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting 
token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 
23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = 
nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' 
(1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1448: $PREPARSER ./calc input +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163430,62 +184108,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | 1 2 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr -./calc.at:1446: $PREPARSER ./calc input -stderr: -stdout: -stderr: -stderr: -./calc.at:1451: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: - | 1 + 2 * 3 + !- ++ - | (1 + 1) / (1 - 1) -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1449: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1448: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163495,248 +184119,187 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: -stderr: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 122): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -stderr: -stderr: -stderr: +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () stderr: +./calc.at:1480: cat stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1479: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 122): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1458: cat stderr -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () input: -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163746,10 +184309,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr - | (1 + #) = 1111 -./calc.at:1448: $PREPARSER ./calc input -542. 
calc.at:1458: ./calc.at:1449: "$PERL" -pi -e 'use strict; + | error +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163759,60 +184320,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1455: cat stderr - ok -input: -stderr: - | 1 2 -./calc.at:1451: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1446: $PREPARSER ./calc input -input: -stderr: -stderr: -1.3: syntax error - | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1449: cat stderr -./calc.at:1455: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 - +syntax error: invalid character: '#' stderr: -./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.3: syntax error -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +./calc.at:1485: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163822,176 +184340,147 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1482: cat stderr +stderr: +syntax error: invalid character: '#' +stderr: +syntax error, unexpected invalid token +input: + | (- *) + (1 2) = 1 +input: +./calc.at:1482: $PREPARSER ./calc input + | error +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token 
'-' () + $2 = token error () +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-8: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 
-Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -./calc.at:1478: mv calc.y.tmp calc.y - +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164001,151 +184490,133 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: + | error +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-8: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a 
token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1448: cat stderr -./calc.at:1449: $PREPARSER ./calc input +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () stderr: -input: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164155,11 +184626,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1451: cat stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1479: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164169,24 +184652,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: -stderr: -./calc.at:1446: cat stderr -input: -1.2: syntax error: invalid character: '#' -stdout: -input: - | 1//2 -./calc.at:1451: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164197,15 +184663,9 @@ }eg ' expout || exit 77 input: - | (1 + #) = 1111 -./calc.at:1455: $PREPARSER ./calc input - | error -stderr: -./calc.at:1446: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164216,642 +184676,386 @@ }eg ' expout || exit 77 stderr: +syntax error: invalid character: '#' +./calc.at:1482: cat stderr +./calc.at:1486: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +input: +input: + | 1 = 2 = 3 +stderr: +syntax error: invalid character: '#' +./calc.at:1479: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1486: $PREPARSER ./calc input +input: +input: + | (* *) + (*) + (*) +./calc.at:1482: $PREPARSER ./calc input +stderr: + | 1 = 2 = 3 +syntax error, unexpected '=' +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token 
-Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +Error: discarding token '*' () Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = 
nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -./calc.at:1449: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '=' stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) 
-Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -input: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1451: cat stderr -stderr: -./calc.at:1446: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1455: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: -stderr: - | 1 = 2 = 3 - | error -./calc.at:1451: $PREPARSER ./calc input -input: -stdout: -1.6: syntax error: invalid character: '#' -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... 
-./types.at:139: $PREPARSER ./test - | (# + 1) = 1111 -./calc.at:1455: $PREPARSER ./calc input -stderr: -./calc.at:1478: mv calc.y.tmp calc.y - -stderr: -1.1: syntax error -stderr: +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-4: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () Entering state 11 -Stack now 0 4 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-6: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '(' () +Shifting token '(' () 
+Entering state 4 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr -stderr: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number 
(1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -444. types.at:139: ok -input: -./calc.at:1451: "$PERL" -pi -e 'use strict; +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164861,10 +185065,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: cat stderr - | (* *) + (*) + (*) -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164874,8 +185075,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1479: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164885,34 +185086,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1446: cat stderr - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1451: cat stderr -./calc.at:1455: cat stderr -./calc.at:1448: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error - -stderr: -1.11-17: error: null divisor -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -input: - | - | +1 -./calc.at:1446: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1451: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164922,41 +185096,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1455: $PREPARSER ./calc input -1.11-17: error: null divisor -stderr: -1.7: syntax error -stderr: -Starting parse -Entering state 0 -Stack 
now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164966,296 +185107,188 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -446. types.at:139: Starting parse +./calc.at:1482: cat stderr +input: +input: +./calc.at:1486: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1485: cat stderr + | + | +1 +stderr: +./calc.at:1479: $PREPARSER ./calc input +error: null divisor +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | + | +1 +./calc.at:1485: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +error: null divisor +syntax error, unexpected '+' + | + | +1 +./calc.at:1486: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery 
-Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - ok -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr -stderr: -1.7: syntax error -stderr: +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is 
token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +syntax error, unexpected '+' +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165265,8 +185298,75 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 
+Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +./calc.at:1480: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165276,8 +185376,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165287,208 +185387,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1446: cat stderr -stderr: -./calc.at:1455: cat stderr -./calc.at:1451: cat stderr -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr -535. calc.at:1448: ok -./calc.at:1446: $PREPARSER ./calc /dev/null -stderr: -input: -input: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 - | (1 + 1) / (1 - 1) - | - | +1 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1455: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -stderr: -2.1: syntax error -stderr: -stderr: - -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +555. 
calc.at:1480: ok stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Reducing stack 0 by rule 5 
(line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165498,158 +185466,105 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1485: cat stderr +stderr: +./calc.at:1486: $PREPARSER ./calc /dev/null +./calc.at:1479: cat stderr Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 4 12 20 29 Reading a token -Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token 
'+' () +Entering state 20 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1485: $PREPARSER ./calc /dev/null +./calc.at:1479: $PREPARSER ./calc /dev/null +stderr: + +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1451: "$PERL" -pi -e 'use strict; +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected end of input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165659,9 +185574,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr -552. 
calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1455: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +stderr: +./calc.at:1482: cat stderr +syntax error, unexpected end of input +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165671,9 +185594,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: mv calc.y.tmp calc.y - -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1486: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165683,1028 +185605,1123 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: cat stderr +./calc.at:1485: cat stderr +input: input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1451: cat stderr -540. calc.at:1455: ok -./calc.at:1449: cat stderr -./calc.at:1451: $PREPARSER ./calc /dev/null +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +input: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is 
token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.1: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1449: $PREPARSER ./calc input +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token 
is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 -Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' 
(1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. 
+Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1479: mv calc.y.tmp calc.y - -./calc.at:1446: cat stderr -./calc.at:1451: cat stderr -input: -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1451: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... -./calc.at:1480: mv calc.y.tmp calc.y - -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1449: cat stderr -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: - | (1 + #) = 1111 -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: $PREPARSER ./calc input Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' 
(1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. 
+Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166714,12 +186731,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1451: cat stderr -./calc.at:1446: cat stderr -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./calc.at:1486: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166729,175 +186744,129 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1451: $PREPARSER ./calc input -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1449: cat stderr stderr: -555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: 
popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. 
+Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1480: mv calc.y.tmp calc.y - -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: - | (# + 1) = 1111 -./calc.at:1451: "$PERL" -pi -e 'use strict; + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: cat stderr +./calc.at:1479: $PREPARSER ./calc input +stderr: +input: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166907,524 +186876,457 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' 
(1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1451: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: cat stderr -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1451: $PREPARSER ./calc input -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1446: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1449: cat stderr -stderr: +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token number (1) +Shifting token number (1) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is 
token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167434,14 +187336,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1451: cat stderr -./calc.at:1446: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1485: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167452,410 +187358,460 @@ }eg ' expout || exit 77 input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1446: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1449: cat stderr + | (1 + #) = 1111 +input: +./calc.at:1479: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 
3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1449: $PREPARSER ./calc input -stderr: +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.11-17: error: null divisor -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -1.11-17: error: null divisor +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: - | 1 + 2 * 3 + !- ++ -./calc.at:1446: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1449: cat stderr +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -536. calc.at:1449: ok - | 1 + 2 * 3 + !- ++ -./calc.at:1451: $PREPARSER ./calc input -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr - +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -input: - | (#) + (#) = 2222 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167865,279 +187821,256 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = 
nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1451: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (#) + (#) = 2222 -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1446: cat stderr -input: -./calc.at:1451: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168147,235 +188080,205 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: cat stderr +./calc.at:1479: cat stderr stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +Error: discarding token '*' () Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () 
Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 -Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -./calc.at:1451: $PREPARSER ./calc input -556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1482: mv calc.y.tmp calc.y - -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1446: cat stderr +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1485: cat stderr input: -./calc.at:1451: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168385,227 +188288,128 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input stderr: +input: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' 
(1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | (- *) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -input: - | (1 + # + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168615,263 +188419,136 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 stderr: Starting parse Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) 
-Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token ')' () Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token end of file () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1451: cat stderr -input: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1446: cat stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +./calc.at:1486: cat stderr input: -stderr: - | (1 + 1) / (1 - 1) -1.6: syntax error: invalid character: '#' -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168881,2179 +188558,81 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: stderr: + | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 
-Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Stack 
now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Now at end of input. +Shifting token "end of input" () Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + 1) / (1 - 1) -./calc.at:1451: $PREPARSER ./calc input -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -stderr: -1.11-17: error: null divisor -./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -534. calc.at:1446: ok -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1451: cat stderr -537. calc.at:1451: ok - -stderr: -stdout: -./calc.at:1459: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -557. calc.at:1482: testing Calculator glr2.cc %debug ... -./calc.at:1482: mv calc.y.tmp calc.y - -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: cat stderr -558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | 1//2 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1459: cat stderr -input: - | error -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1459: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: cat stderr -input: - | - | +1 -./calc.at:1459: $PREPARSER ./calc input -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: cat stderr -./calc.at:1459: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1459: $PREPARSER ./calc input -stderr: -stdout: -stderr: -./calc.at:1468: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. 
== 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1468: $PREPARSER ./calc input -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: cat stderr -stderr: -input: - | 1 2 -input: -./calc.at:1468: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -stderr: -./calc.at:1459: $PREPARSER ./calc input -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -syntax error -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1468: cat stderr -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -stderr: -./calc.at:1468: $PREPARSER ./calc input -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: cat stderr -stderr: -syntax error -input: - | (* *) + (*) + (*) -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1468: cat stderr -./calc.at:1459: cat stderr -input: - | error -./calc.at:1468: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1459: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1459: $PREPARSER ./calc input -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: cat stderr -stderr: -input: - | 1 = 2 = 3 -./calc.at:1459: cat stderr -./calc.at:1468: $PREPARSER ./calc input -stderr: -input: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1459: $PREPARSER ./calc input -stderr: -syntax error -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: cat stderr -./calc.at:1468: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -input: - | - | +1 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1459: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1468: cat stderr -./calc.at:1459: cat stderr -./calc.at:1468: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -stderr: -syntax error -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: cat stderr -./calc.at:1468: cat stderr -input: -input: - | (1 + 1) / (1 - 1) -./calc.at:1459: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1468: $PREPARSER ./calc input -stderr: -stderr: -1.11-17: error: null divisor -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -1.11-17: error: null divisor -./calc.at:1459: cat stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -543. calc.at:1459: ok -stderr: -stdout: -./calc.at:1468: cat stderr -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | (!!) + (1 2) = 1 -./calc.at:1468: $PREPARSER ./calc input -input: - -stderr: -syntax error -error: 2222 != 1 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -stderr: -input: - | 1 2 -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1468: cat stderr -input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -input: - | 1//2 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -./calc.at:1468: cat stderr -559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -syntax error -syntax error -syntax error -input: - | error -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -./calc.at:1468: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1468: $PREPARSER ./calc input -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -input: -input: - | 1 + 2 * 3 + !- ++ - | 1 = 2 = 3 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: -input: - | - | +1 - | (#) + (#) = 2222 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -./calc.at:1476: cat stderr -./calc.at:1476: $PREPARSER ./calc /dev/null -input: -stderr: -syntax error - | (1 + #) = 1111 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: $PREPARSER ./calc input -input: -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | (# + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -stderr: -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: -./calc.at:1468: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -error: null divisor -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1468: cat stderr -stderr: -syntax error -syntax error -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -544. calc.at:1468: ok -stderr: -syntax error -syntax error -syntax error - -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1476: $PREPARSER ./calc input -560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... -stderr: -./calc.at:1486: mv calc.y.tmp calc.y - -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -stderr: -stdout: -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1476: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -stderr: -./calc.at:1476: $PREPARSER ./calc input -input: -stderr: - | 1 2 -./calc.at:1477: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error -syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1476: cat stderr -input: - | 1//2 -./calc.at:1477: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1476: $PREPARSER ./calc input -stderr: -1.3: syntax error -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1476: cat stderr -input: - | error -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.1: syntax error -input: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error -error: null divisor -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: -./calc.at:1476: cat stderr - | 1 = 2 = 3 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.7: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -546. calc.at:1476: ok -stderr: -1.7: syntax error - -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | - | +1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1477: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1486: mv calc.y.tmp calc.y - -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1477: cat stderr -stderr: -stdout: -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1478: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -1.3: syntax error -input: - | (!!) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -input: - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1477: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -input: -./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | error -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1477: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -input: - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1477: cat stderr -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -input: -stderr: - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -2.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: -./calc.at:1478: cat stderr - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: $PREPARSER ./calc /dev/null -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1478: cat stderr -input: - | (1 + #) = 1111 -input: -./calc.at:1477: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1478: cat stderr -input: -input: - | (# + 1) = 1111 - | (!!) 
+ (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1477: cat stderr -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1477: cat stderr -input: -input: - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: $PREPARSER ./calc input -stderr: -stderr: -1.11-17: error: null divisor -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -input: -./calc.at:1477: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -548. calc.at:1477: ok -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... -stderr: -./calc.at:1487: mv calc.y.tmp calc.y - -1.6: syntax error: invalid character: '#' -./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1478: cat stderr -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (# + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -input: - | 1 2 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -./calc.at:1478: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -input: - | 1//2 -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1478: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: - | error -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: cat stderr -stderr: -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -550. calc.at:1478: syntax error, unexpected invalid token - ok -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1480: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '=' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '=' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '+' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -./calc.at:1480: cat stderr -./calc.at:1489: mv calc.y.tmp calc.y - -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1480: $PREPARSER ./calc /dev/null -stderr: -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -syntax error, unexpected end of input -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -stderr: +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -input: Starting parse Entering state 0 Reading a token @@ -171089,44 +188668,105 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -171135,6 +188775,18 @@ -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -171159,364 +188811,275 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token "number" (3) Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (3) -> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) Entering state 30 -Next token is token '=' () +Reading a token +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: cat stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Entering state 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1479: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +input: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering 
state 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -171526,9 +189089,9 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -171536,18 +189099,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 28 +Entering state 29 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token "number" (3) Shifting token "number" (3) @@ -171555,57 +189111,47 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (3) -> $$ = nterm exp (3) -Entering state 28 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () $3 = nterm exp (3) --> $$ = nterm exp (-4) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -171613,117 +189159,91 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Entering state 12 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 
80): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -171731,11 +189251,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token "number" (3) Shifting token "number" (3) @@ -171743,156 +189263,116 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (3) -> $$ = nterm exp (3) -Entering state 32 +Entering state 30 Reading a token -Next token./calc.at:1485: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) - $2 = token '^' () + $2 = token '*' () $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm 
exp (1111) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -171901,19 +189381,15 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -171921,11 +189397,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -171933,59 +189409,104 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) -Entering state 27 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1485: cat stderr +input: +./calc.at:1482: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -171993,115 +189514,44 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () +Next token is token ')' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' 
() -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172109,40 +189559,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Entering state 12 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 +Entering state 19 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172150,42 +189571,78 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm 
exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | (#) + (#) = 2222 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172193,46 +189650,44 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = nterm exp (-1) + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172240,590 +189695,788 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) 
-Entering state 27 +Entering state 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) - $2 = token '=' () + $2 = token '-' () $3 = nterm exp (1) --> $$ = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ 
= nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 +Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: + | (#) + (#) = 2222 +./calc.at:1486: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) + $1 = nterm exp (2222) $2 = 
token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token 
"number" (2) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token 
is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next tokensyntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 - is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +stdout: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1482: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1485: $PREPARSER ./calc input +557. calc.at:1482: ok +./calc.at:1486: cat stderr +stderr: +input: +input: + | (#) + (#) = 2222 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: input: + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input stderr: - | 1 2 -./calc.at:1482: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1485: $PREPARSER ./calc input + +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token "number" (2) -syntax error +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -172831,11 +190484,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -172843,23 +190496,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -172869,16 +190522,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -172887,21 +190540,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -172909,11 +190562,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -172925,29 +190578,29 @@ Shifting token '-' () Entering state 2 Reading a token 
-Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -172961,22 +190614,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -172985,12 +190638,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -172999,11 +190652,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173013,11 +190666,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -173025,23 +190678,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = 
nterm exp (1) -> $$ = nterm exp (-1) @@ -173054,22 +190707,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -173078,12 +190731,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173097,16 +190750,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -173114,7 +190767,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -173125,16 +190778,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -173144,16 +190797,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -173162,12 +190815,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173176,11 +190829,11 
@@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173198,28 +190851,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -173232,22 +190885,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -173256,12 +190909,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173270,21 +190923,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -173292,16 +190945,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) 
Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -173311,16 +190964,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -173334,22 +190987,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -173358,22 +191011,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -173385,11 +191038,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -173397,16 +191050,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -173415,7 +191068,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -173423,7 +191076,7 @@ Entering state 28 Reading a token Next token is 
token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -173433,16 +191086,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -173451,12 +191104,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173465,21 +191118,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -173487,11 +191140,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -173499,23 +191152,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -173525,16 +191178,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = 
nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -173543,12 +191196,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173558,11 +191211,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -173570,16 +191223,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -173588,7 +191241,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -173599,16 +191252,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -173618,16 +191271,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -173636,33 +191289,120 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -173670,11 +191410,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -173682,23 +191422,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -173708,16 +191448,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -173726,21 +191466,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -173748,11 +191488,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -173764,29 +191504,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) 
+Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -173800,22 +191540,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -173824,12 +191564,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173838,11 +191578,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173852,11 +191592,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -173864,23 +191604,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -173893,22 +191633,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token 
"number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -173917,12 +191657,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -173936,16 +191676,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -173953,7 +191693,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -173964,16 +191704,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -173983,16 +191723,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -174001,12 +191741,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -174015,11 +191755,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering 
state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -174037,28 +191777,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -174071,22 +191811,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -174095,12 +191835,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -174109,21 +191849,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -174131,16 +191871,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ 
-174150,16 +191890,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -174173,22 +191913,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -174197,22 +191937,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -174224,11 +191964,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -174236,16 +191976,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -174254,7 +191994,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -174262,7 +192002,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -174272,16 +192012,16 @@ Shifting token '=' () 
Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -174290,12 +192030,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -174304,21 +192044,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -174326,11 +192066,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -174338,23 +192078,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -174364,16 +192104,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -174382,12 +192122,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): 
+Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -174397,11 +192137,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -174409,16 +192149,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -174427,7 +192167,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -174438,16 +192178,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -174457,16 +192197,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -174475,23 +192215,24 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1485: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174502,56 +192243,20 @@ }eg ' expout || exit 77 input: -input: | 1 2 -./calc.at:1485: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -stderr: +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) input: - | 1//2 -./calc.at:1482: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -174563,27 +192268,17 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () +Next token is token "number" (2) +syntax error, unexpected number Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token "number" (2) + | (# + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +./calc.at:1479: cat stderr +stderr: stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -174595,98 +192290,158 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () +Next token is token "number" (2) +syntax error, unexpected number Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1485: cat stderr -./calc.at:1480: cat stderr -input: - | 1//2 -./calc.at:1485: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: discarding lookahead token "number" (2) Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token Next token is token number (1) -Shifting token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -stderr: +input: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token Next token is token number (1) -Shifting token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: + | (1 + #) = 1111 +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174696,10 +192451,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | error -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: cat stderr ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -174710,63 +192461,158 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +syntax error: invalid character: '#' +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr ./calc.at:1485: cat stderr - | (* *) + (*) + (*) stderr: -./calc.at:1480: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-input: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1485: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1480: "$PERL" -pi -e 'use strict; +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1491: mv calc.y.tmp calc.y + +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174776,9 +192622,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1480: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; +input: +./calc.at:1479: cat stderr + | 1//2 +./calc.at:1489: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174788,15 +192640,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | 1 = 2 = 3 - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: cat stderr -stderr: stderr: +input: Starting parse Entering state 0 Reading a token @@ -174808,176 +192653,213 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: discarding lookahead token '/' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading 
a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1486: cat stderr +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token '/' () stderr: +input: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: cat stderr -input: - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | (1 + # + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174987,153 +192869,194 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1485: $PREPARSER ./calc input -stderr: +./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1480: cat stderr -stderr: -Starting parse -Entering state 0 +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = 
token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error stderr: - | (#) + (#) = 2222 -./calc.at:1480: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my 
$unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1482: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: $PREPARSER ./calc /dev/null -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: $PREPARSER ./calc /dev/null -./calc.at:1480: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -input: +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () - | (1 + #) = 1111 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: $PREPARSER ./calc input -stderr: +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Now at end of input. 
-syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175145,7 +193068,8 @@ ' expout || exit 77 stderr: syntax error: invalid character: '#' -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175155,8 +193079,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1486: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175166,19 +193090,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -input: -./calc.at:1480: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1485: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input stderr: +input: + | (1 + 1) / (1 - 1) +syntax error: invalid character: '#' +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: cat stderr stderr: +./calc.at:1485: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -175186,144 +193108,300 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 28 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) - $2 = token '+' () + $2 = token '-' () $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) --> $$ = nterm exp (3) +-> $$ = nterm exp (2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (0) +Entering state 31 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: + | (1 + # + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () @@ -175337,60 +193415,42 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) @@ -175398,21 +193458,50 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 12 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm 
exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -175426,40 +193515,49 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -175467,221 +193565,253 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax 
error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (0) $3 = token ')' 
() --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +560. calc.at:1486: ok +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1479: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: cat stderr stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +error: null divisor +./calc.at:1489: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1479: cat stderr stderr: -stdout: +567. 
calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +558. calc.at:1485: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1489: cat stderr +./calc.at:1489: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1489: $PREPARSER ./calc input stderr: +568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS Starting parse Entering state 0 Reading a token @@ -175690,7 +193820,7 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token ')' () @@ -175762,7 +193892,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -175793,7 +193923,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -175864,7 +193994,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -175929,6 +194059,8 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -175943,7 +194075,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -175958,11 +194090,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -175970,16 +194102,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -175989,16 +194121,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -176017,7 +194149,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176025,7 +194157,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -176057,7 +194189,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176065,7 +194197,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -176079,11 +194211,11 @@ 
Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -176091,16 +194223,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -176124,7 +194256,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176132,7 +194264,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -176142,16 +194274,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -176161,70 +194293,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1469: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -syntax error: invalid character: '#' -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176234,27 +194319,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -./calc.at:1482: cat stderr -./calc.at:1480: cat stderr -stderr: -input: -input: -input: - | 1 2 +./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1489: cat stderr input: -./calc.at:1469: $PREPARSER ./calc input - | (!!) + (1 2) = 1 - | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input | (!!) + (1 2) = 1 -stderr: -./calc.at:1482: $PREPARSER ./calc input -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -176270,7 +194339,7 @@ Next token is token '!' () Shifting token '!' () Entering state 15 -Reducing stack 0 by rule 16 (line 120): +Reducing stack 0 by rule 16 (line 107): $1 = token '!' () $2 = token '!' 
() Shifting token error () @@ -176279,7 +194348,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176294,28 +194363,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) +Next token is token "number" (2) syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176323,7 +194392,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -176333,16 +194402,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -176352,28 +194421,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +553. 
calc.at:1479: ok stderr: Starting parse Entering state 0 @@ -176422,7 +194486,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -176486,7 +194550,22 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +input: + + | (- *) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -176494,23 +194573,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176525,28 +194609,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) +Next token is token "number" (2) syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176554,7 +194638,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -176564,16 +194648,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next 
token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -176583,23 +194667,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -syntax error: invalid character: '#' +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -176608,20 +194691,25 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -176648,7 +194736,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -176712,38 +194800,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176753,25 +194810,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1469: cat stderr -input: -input: -input: - | 1//2 - | (1 + 1) / (1 - 1) -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -stderr: +./calc.at:1489: cat stderr +569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y + stderr: -syntax error -error: null divisor -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1485: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | (* *) + (*) + (*) +./calc.at:1489: $PREPARSER ./calc input stderr: -./calc.at:1482: cat stderr Starting parse Entering state 0 Reading a token @@ -176779,28 +194839,22 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -176815,104 +194869,43 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -error: null divisor -syntax error -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () @@ -176922,129 +194915,55 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1485: $PREPARSER ./calc input stderr: -./calc.at:1485: cat stderr Starting parse Entering state 0 Reading a token @@ -177052,22 +194971,16 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -177088,21 +195001,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -177116,38 +195020,52 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -177161,651 +195079,844 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1480: cat stderr -./calc.at:1469: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -input: - | (* *) + (*) + (*) -554. calc.at:1480: ./calc.at:1485: $PREPARSER ./calc input - ok - | error Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '=' () 
-Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1469: $PREPARSER ./calc input -stderr: -stderr: -syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) + $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) + $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1469: cat stderr -stderr: -./calc.at:1485: cat stderr -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is 
token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 
+Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1469: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token 
'+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting 
token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -177842,51 +195953,51 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) @@ -177912,61 +196023,138 @@ Shifting token '*' () Entering state 21 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token number (3) Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 30 +Entering state 10 Reading a token -Next token is token '+' () +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -177974,11 +196162,70 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading 
a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token Next token is token number (2) Shifting token number (2) @@ -177986,11 +196233,185 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 +Entering state 32 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is 
token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token number (3) Shifting token number (3) @@ -177998,44 +196419,57 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 30 +Entering state 28 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -Starting parse -Entering state 0 +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) @@ -178045,9 +196479,13 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (2) Shifting token number (2) @@ -178055,11 +196493,11 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 +Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token number (3) Shifting token number (3) @@ -178067,45 +196505,317 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 30 +Entering state 28 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) - $2 = token '*' () + $2 = token '-' () $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1469: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -stdout: -./types.at:139: $PREPARSER ./test +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is 
token ')' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () input: + | 1 2 +./calc.at:1485: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1489: cat stderr +./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1485: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -178172,25 +196882,9 @@ $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) - | - | +1 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -178256,18 +196950,34 @@ $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -447. types.at:139: ok -stderr: -syntax error + | 1//2 +./calc.at:1485: $PREPARSER ./calc input input: | 1 + 2 * 3 + !- ++ -./calc.at:1485: cat stderr -./calc.at:1482: $PREPARSER ./calc input -564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -./calc.at:1489: mv calc.y.tmp calc.y - +./calc.at:1489: $PREPARSER ./calc input stderr: -input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -178333,21 +197043,8 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -178414,210 +197111,26 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1469: cat stderr -./calc.at:1469: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -178628,7 +197141,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -178639,102 +197152,28 @@ }eg ' expout || exit 77 ./calc.at:1485: cat stderr -./calc.at:1482: cat stderr input: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 +./calc.at:1489: cat stderr + | error ./calc.at:1485: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (#) + (#) = 2222 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () stderr: Starting parse Entering state 0 @@ -178833,87 +197272,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -179012,9 +197371,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1469: $PREPARSER ./calc input -stderr: ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -179025,20 +197381,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1485: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179049,168 +197393,44 @@ }eg ' expout || exit 77 input: - | (# + 1) = 1111 + | 1 = 2 = 3 ./calc.at:1485: $PREPARSER ./calc input -565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1482: cat stderr -./calc.at:1491: mv calc.y.tmp calc.y - +./calc.at:1489: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1469: cat stderr +stderr: +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -179289,19 +197509,37 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () stderr: -input: Starting parse Entering state 0 Reading a token @@ -179379,22 +197617,17 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | (!!) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1485: cat stderr -stderr: -syntax error -error: 2222 != 1 -input: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179404,6 +197637,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1489: cat stderr +input: +input: + | + | +1 +./calc.at:1485: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -179411,22 +197675,8 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -179435,15 +197685,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -179454,16 +197704,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -179472,22 +197722,42 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -179496,22 +197766,8 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -179520,15 +197776,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -179539,16 +197795,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -179557,23 +197813,34 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1485: $PREPARSER ./calc /dev/null +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179583,9 +197850,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () input: - | (# + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -179596,7 +197879,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1469: cat stderr +./calc.at:1485: cat stderr stderr: Starting parse Entering state 0 @@ -179605,8 +197888,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -179667,13 +197964,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (- *) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -input: Starting parse Entering state 0 Reading a token @@ -179681,8 +197974,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -179743,15 +198050,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | (1 + 1) / (1 - 1) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1485: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -179759,6 +198060,28 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -179779,26 +198102,102 @@ -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -179812,48 +198211,82 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -179867,11 +198300,8 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -syntax error -syntax error -error: 2222 != 1 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179881,6 +198311,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1489: cat stderr stderr: Starting parse Entering state 0 @@ -179889,6 +198320,28 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -179909,42 +198362,16 @@ -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) Shifting token number (1) @@ -179952,296 +198379,191 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 +Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () $3 = nterm exp (1) --> $$ = nterm exp (0) +-> $$ = nterm exp (3) Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1469: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1469: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -558. calc.at:1485: syntax error -syntax error -syntax error - ok -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -stderr: -stdout: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test - -stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1482: cat stderr -./calc.at:1469: cat stderr -449. 
types.at:139: ok input: | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input -input: +./calc.at:1489: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1469: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -180357,10 +198679,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -180477,14 +198796,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1469: $PREPARSER ./calc input -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180494,20 +198806,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1485: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180517,103 +198817,237 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1476: $PREPARSER ./calc input -556. calc.at:1482: ok -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -stderr: -input: input: - - | 1 2 -566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... - | (#) + (#) = 2222 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1491: mv calc.y.tmp calc.y - + | (!!) + (1 2) = 1 +./calc.at:1489: cat stderr +./calc.at:1485: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -syntax error -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1492: mv calc.y.tmp calc.y - -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | 1//2 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1469: cat stderr -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error - | (1 + #) = 1111 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1469: "$PERL" -pi -e 'use strict; +563. calc.at:1489: Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + ok +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180623,78 +199057,247 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: cat stderr -568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1492: mv calc.y.tmp calc.y +./calc.at:1485: cat stderr input: - | error -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1469: cat stderr -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -input: - | (# + 1) = 1111 -./calc.at:1469: $PREPARSER ./calc input -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1476: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1469: cat stderr -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1469: $PREPARSER ./calc input -syntax error + | (- *) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token 
'=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = 
nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180704,109 +199307,262 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1476: cat stderr -input: - | - | +1 -./calc.at:1469: cat stderr -./calc.at:1476: $PREPARSER ./calc input +570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y + +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1485: cat stderr stderr: input: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -stderr: -error: null divisor -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1469: cat stderr -./calc.at:1476: $PREPARSER ./calc /dev/null -stderr: -syntax error -545. calc.at:1469: ok -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input stderr: -syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1487: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input stderr: -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: -syntax error -error: 2222 != 1 -569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1494: mv calc.y.tmp calc.y - -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180816,11 +199572,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: stdout: -./calc.at:1476: cat stderr -./calc.at:1479: "$PERL" -ne ' +./calc.at:1486: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -180829,13 +199582,9 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc calc.hh + )' calc.cc input: - | (- *) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -input: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -180849,164 +199598,9 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1479: $PREPARSER ./calc input -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1476: cat stderr -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error, unexpected number - | (* *) + (*) + (*) -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1479: cat stderr -syntax error -syntax error -syntax error -input: - | 1//2 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1476: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1479: cat stderr -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected invalid token -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -syntax error, unexpected invalid token -stderr: -stderr: -stdout: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -450. types.at:139: ok -./calc.at:1479: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -stdout: - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - - -./calc.at:1476: cat stderr -input: -stderr: -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1476: $PREPARSER ./calc input +./calc.at:1485: cat stderr +./calc.at:1487: $PREPARSER ./calc input input: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -181020,34 +199614,19 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -stderr: ./calc.at:1486: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '=' -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +input: + | 1 + 2 * 3 + !+ ++ stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181055,11 +199634,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -181067,23 +199646,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -181093,16 +199672,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -181111,21 +199690,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 
69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181133,11 +199712,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -181149,29 +199728,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -181185,22 +199764,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -181209,12 +199788,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181223,11 +199802,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181237,11 +199816,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 
-Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -181249,23 +199828,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -181278,22 +199857,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -181302,12 +199881,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181321,16 +199900,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -181338,7 +199917,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -181349,16 +199928,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -181368,16 +199947,16 @@ Shifting token '=' () Entering 
state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -181386,12 +199965,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181400,11 +199979,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181422,28 +200001,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -181456,22 +200035,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -181480,12 +200059,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181494,21 +200073,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 
= token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181516,16 +200095,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -181535,16 +200114,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -181558,22 +200137,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -181582,22 +200161,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181609,11 +200188,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = 
nterm exp (2) Entering state 12 Reading a token @@ -181621,16 +200200,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -181639,7 +200218,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -181647,7 +200226,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -181657,16 +200236,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -181675,12 +200254,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181689,21 +200268,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -181711,11 +200290,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -181723,23 +200302,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering 
state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -181749,16 +200328,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -181767,12 +200346,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -181782,11 +200361,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -181794,16 +200373,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -181812,7 +200391,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -181823,16 +200402,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -181842,16 +200421,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 
Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -181860,32 +200439,34 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181893,11 +200474,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -181905,11 +200486,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -181931,11 +200512,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -181959,11 +200540,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -181971,11 +200552,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -181987,11 +200568,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = 
token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -182023,11 +200604,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -182075,11 +200656,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -182087,11 +200668,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -182116,11 +200697,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -182159,11 +200740,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -182187,11 +200768,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -182206,11 +200787,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -182260,11 +200841,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -182294,11 +200875,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -182342,11 +200923,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token 
is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -182354,11 +200935,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -182373,11 +200954,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -182396,11 +200977,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -182431,11 +201012,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -182447,11 +201028,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -182459,11 +201040,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -182495,11 +201076,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -182537,11 +201118,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -182549,11 +201130,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -182561,11 +201142,11 @@ Shifting token '^' 
() Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -182587,11 +201168,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -182620,11 +201201,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -182632,11 +201213,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -182661,11 +201242,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -182680,11 +201261,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -182710,1108 +201291,988 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | 1 2 -./calc.at:1486: $PREPARSER ./calc input -input: - | - | +1 stderr: -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1476: cat stderr -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -syntax error, unexpected '+' - | (1 + #) = 1111 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1479: cat stderr -./calc.at:1486: cat stderr -./calc.at:1479: $PREPARSER ./calc /dev/null -stderr: -syntax error, unexpected end of input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -input: -./calc.at:1494: mv calc.y.tmp calc.y - - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -syntax error, unexpected end of input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1486: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: -syntax error: invalid character: '#' - | error -./calc.at:1486: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1476: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1479: cat stderr -input: -input: - | (!!) 
+ (1 2) = 1 - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1476: $PREPARSER ./calc input -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 27 +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 
(line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1479: cat stderr -./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -input: -input: - | - | +1 - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding 
lookahead token '+' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -Starting parse -Entering state 0 -Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1476: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1486: cat stderr -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1479: cat stderr -stderr: -error: null divisor -./calc.at:1486: $PREPARSER ./calc /dev/null -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input -stderr: -error: null divisor -stderr: -Starting parse -Entering state 0 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -stdout: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1486: cat stderr -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: -./calc.at:1479: cat stderr -./calc.at:1476: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -input: -stderr: -input: -547. calc.at:1476: ok - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -stderr: -./calc.at:1477: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) 
+ $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) - $2 = token '+' () + $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next 
token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) 
+ $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 30 +Entering state 28 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) - $2 = token '*' () + $2 = token '-' () $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: - -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | 1 2 -stderr: -./calc.at:1480: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () 
+Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Entering state 32 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (4) +Entering state 8 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '*' () +Next token is token '=' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) + $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (7) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -183820,414 +202281,198 @@ -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -1.3: syntax error -stderr: -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1478: $PREPARSER ./calc input -syntax error, unexpected number -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr - | 1 2 -input: - | (!!) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: cat stderr -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1479: cat stderr -input: -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-5) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () 
+Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1477: $PREPARSER ./calc input -input: -input: - | 1//2 -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -stderr: -stderr: -1.3: syntax error -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 10 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token 
"number" (1) -Shifting token "number" (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1480: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -input: - | 1//2 -./calc.at:1477: cat stderr -./calc.at:1478: $PREPARSER ./calc input -input: -571. calc.at:1504: testing Calculator lalr1.d ... -input: -./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: - | (1 + #) = 1111 -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -184236,655 +202481,574 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) 
Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +-> $$ = nterm exp (1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | error -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1479: $PREPARSER ./calc input -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1477: $PREPARSER ./calc input -stderr: -stderr: -stderr: -stderr: -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -syntax error: invalid character: '#' -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) 
-Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -stderr: -syntax error, unexpected invalid token -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -stderr: -1.1: syntax error -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1486: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: - | error -input: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1479: cat stderr - | 1 = 2 = 3 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -syntax error, unexpected '=' -input: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 -1.1: syntax error -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -syntax error, unexpected '=' input: - | 1 = 2 = 3 -stderr: -./calc.at:1477: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.7: syntax error -571. calc.at:1504: ./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (calc.at:1504) -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1478: cat stderr - -./calc.at:1479: cat stderr -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1486: $PREPARSER ./calc input - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1477: cat stderr + | 1 2 input: -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1487: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -184892,11 +203056,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -184904,23 +203068,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -184937,116 +203101,43 @@ Next token is token '+' () Shifting token '+' () Entering state 14 -Reducing stack 0 by rule 17 (line 108): +Reducing stack 0 by rule 17 (line 121): $1 = token '!' 
() $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -input: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '+' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input -input: -stderr: - | - | +1 -syntax error: invalid character: '#' -./calc.at:1477: $PREPARSER ./calc input + | 1 2 +./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -stderr: -syntax error, unexpected '+' -1.7: syntax error -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -stderr: +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -1.7: syntax error +./calc.at:1485: $PREPARSER ./calc input stderr: -2.1: syntax error stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -185054,11 +203145,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -185066,23 +203157,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -185099,41 +203190,53 @@ Next token is token '-' () Shifting token '-' () Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Reducing stack 0 by rule 18 (line 122): $1 = token '!' () $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -185141,11 +203244,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -185153,23 +203256,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -185186,39 +203289,29 @@ Next token is token '-' () Shifting token '-' () Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Reducing stack 0 by rule 18 (line 122): $1 = token '!' () $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1479: cat stderr -./calc.at:1480: $PREPARSER ./calc /dev/null -./calc.at:1477: cat stderr stderr: -./calc.at:1478: cat stderr -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1487: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -185228,47 +203321,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc /dev/null - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1485: cat stderr input: -1.1: syntax error -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of input - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -error: null divisor -./calc.at:1486: cat stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -2.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -error: null divisor input: -stderr: -2.1: syntax error | (#) + (#) = 2222 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1485: $PREPARSER ./calc input + | 1//2 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -185282,6 +203342,28 @@ Starting parse Entering state 0 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -185298,7 +203380,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185325,7 +203407,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185333,7 +203415,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -185343,16 +203425,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) -> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) @@ -185361,38 +203443,73 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () input: + | 1//2 +./calc.at:1486: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -185409,7 +203526,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185436,7 +203553,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185444,7 +203561,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -185454,16 +203571,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) -> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) @@ -185472,62 +203589,47 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1479: cat stderr +./calc.at:1487: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -572. calc.at:1509: testing Calculator D ... -./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: cat stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; + | error +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -185537,32 +203639,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc /dev/null -552. 
calc.at:1479: stderr: - ok -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -185572,21 +203665,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1487: cat stderr +input: | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: - Starting parse Entering state 0 Reading a token @@ -185594,11 +203678,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -185620,7 +203704,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185631,16 +203715,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -185649,26 +203733,29 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing 
stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -./calc.at:1478: cat stderr input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1486: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +./calc.at:1487: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -185676,11 +203763,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -185702,7 +203789,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185713,16 +203800,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -185731,73 +203818,98 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | (!!) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -input: -syntax error, unexpected number -error: 2222 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () stderr: -stderr: -stdout: -syntax error, unexpected number -error: 2222 != 1 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./types.at:139: $PREPARSER ./test -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1486: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: 
(.*)\)} { my $unexp = $1; @@ -185807,7 +203919,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -185817,21 +203929,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1487: cat stderr +./calc.at:1485: cat stderr +./calc.at:1486: cat stderr input: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -448. types.at:139: ok +input: + | + | +1 +./calc.at:1487: $PREPARSER ./calc input | (# + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 ./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () stderr: Starting parse Entering state 0 @@ -185850,15 +204001,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185869,16 +204020,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -185887,27 +204038,55 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm 
line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -./calc.at:1478: cat stderr -input: stderr: -572. calc.at:1509: Starting parse +Starting parse Entering state 0 Reading a token Next token is token '(' () @@ -185924,15 +204103,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -185943,16 +204122,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -185961,43 +204140,61 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | (- *) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input - skipped (calc.at:1509) - -input: -input: - | (- *) + (1 2) = 1 - | (!!) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1477: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 - -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1487: $PREPARSER ./calc /dev/null stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -186007,25 +204204,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1486: cat stderr -573. calc.at:1510: testing Calculator D %locations ... -./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -186035,30 +204215,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1485: cat stderr input: - | (1 + # + 1) = 1111 + | + | +1 ./calc.at:1486: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | (1 + # + 1) = 1111 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () stderr: +./calc.at:1487: cat stderr Starting parse Entering state 0 Reading a token @@ -186066,11 +204281,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186090,15 +204305,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -186109,16 +204324,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -186127,24 +204342,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1478: cat stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -186153,11 +204366,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186177,15 +204390,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -186196,16 +204409,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -186214,29 +204427,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () input: - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1487: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -186247,15 +204456,257 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: cat stderr ./calc.at:1486: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1478: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () 
+Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -186265,24 +204716,276 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (* *) + (*) + (*) -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token 
+Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1485: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: | (1 + 1) / (1 - 1) -./calc.at:1486: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1487: cat stderr stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr +stdout: stderr: Starting parse Entering state 0 @@ -186291,11 +204994,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186303,16 +205006,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -186321,7 +205024,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -186336,11 +205039,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186348,16 +205051,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -186366,7 +205069,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -186374,7 +205077,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 
10 (line 93): +Reducing stack 0 by rule 10 (line 106): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -186384,32 +205087,47 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -186417,11 +205135,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186429,16 +205147,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -186447,7 +205165,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -186462,11 +205180,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -186474,16 +205192,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next 
token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -186492,7 +205210,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -186500,7 +205218,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Reducing stack 0 by rule 10 (line 106): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -186510,428 +205228,248 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... -./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' 
() + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: cat stderr -575. calc.at:1514: testing Calculator D %debug ... -input: -./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1480: $PREPARSER ./calc input -573. calc.at:1510: stderr: -560. calc.at:1486: ok -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - skipped (calc.at:1510) -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -input: - | 1 + 2 * 3 + !- ++ -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input - -./calc.at:1478: cat stderr -stderr: -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1477: cat stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1480: cat stderr -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (#) + (#) = 2222 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -574. calc.at:1512: skipped (calc.at:1512) -./calc.at:1477: cat stderr -./calc.at:1478: cat stderr -575. calc.at:1514: ./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - skipped (calc.at:1514) -input: -./calc.at:1480: cat stderr - - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input -stderr: -input: - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -input: -1.6: syntax error: invalid character: '#' -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1480: $PREPARSER ./calc input -576. calc.at:1516: testing Calculator D parse.error=custom ... -stderr: -./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -syntax error: invalid character: '#' -577. calc.at:1517: testing Calculator D %locations parse.error=custom ... -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -stderr: -syntax error: invalid character: '#' -./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: cat stderr -input: -input: -stderr: - | (# + 1) = 1111 - | (# + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -1.2: syntax error: invalid character: '#' -stderr: -syntax error: invalid character: '#' -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -451. types.at:139: ok -stderr: -stdout: -input: -./calc.at:1477: cat stderr -./types.at:139: $PREPARSER ./test -./calc.at:1480: cat stderr - | (# + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... -stderr: -./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -576. calc.at:1516: input: - | (1 + # + 1) = 1111 -1.2: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - skipped (calc.at:1516) -./calc.at:1477: $PREPARSER ./calc input - - | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -579. calc.at:1519: testing Calculator D %locations parse.error=simple ... -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -577. calc.at:1517: 1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' - - skipped (calc.at:1517) -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -452. types.at:139: ok -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr - -./calc.at:1477: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11-17: error: null divisor -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -186941,74 +205479,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1478: cat stderr -error: null divisor -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -error: null divisor -./calc.at:1477: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -578. calc.at:1518: stderr: - skipped (calc.at:1518) -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -549. calc.at:1477: ok -579. calc.at:1519: stderr: -1.11-17: error: null divisor -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - skipped (calc.at:1519) -./calc.at:1480: cat stderr -580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... 
-./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y - -581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... -./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -555. calc.at:1480: ok - -./calc.at:1478: cat stderr - -582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... -./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... -./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -stdout: -551. calc.at:1478: ok -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1485: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -187023,28 +205498,281 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1489: $PREPARSER ./calc input - +./calc.at:1487: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +559. 
calc.at:1485: ok +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1487: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -187052,11 +205780,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -187078,11 +205806,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -187106,11 +205834,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -187118,11 +205846,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -187134,11 +205862,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -187170,11 +205898,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -187222,11 +205950,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -187234,11 +205962,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token 
"number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -187263,11 +205991,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -187306,11 +206034,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -187334,11 +206062,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -187353,11 +206081,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -187407,11 +206135,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -187441,11 +206169,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -187489,11 +206217,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -187501,11 +206229,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -187520,11 +206248,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -187543,11 +206271,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is 
token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -187578,11 +206306,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -187594,11 +206322,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -187606,11 +206334,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -187642,11 +206370,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -187684,11 +206412,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -187696,11 +206424,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -187708,11 +206436,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -187734,11 +206462,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -187767,11 +206495,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -187779,11 +206507,11 @@ Shifting 
token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -187808,11 +206536,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -187827,11 +206555,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -187857,20 +206585,270 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: + +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -187878,11 +206856,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -187890,11 +206868,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -187916,11 +206894,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -187944,11 +206922,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -187956,11 +206934,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by 
rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -187972,11 +206950,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -188008,11 +206986,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -188060,11 +207038,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188072,11 +207050,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -188101,11 +207079,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188144,11 +207122,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188172,11 +207150,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -188191,11 +207169,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -188245,11 +207223,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188279,11 +207257,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token 
"number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188327,11 +207305,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -188339,11 +207317,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -188358,11 +207336,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -188381,11 +207359,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -188416,11 +207394,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -188432,11 +207410,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -188444,11 +207422,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -188480,11 +207458,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -188522,11 +207500,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -188534,11 
+207512,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -188546,11 +207524,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -188572,11 +207550,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -188605,11 +207583,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -188617,11 +207595,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -188646,11 +207624,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -188665,11 +207643,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -188695,988 +207673,1094 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -input: - | 1 2 -580. calc.at:1520: skipped (calc.at:1520) -581. calc.at:1521: 584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... -./calc.at:1489: $PREPARSER ./calc input - skipped (calc.at:1521) -./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... -./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y - -stderr: -583. 
calc.at:1524: 582. calc.at:1523: Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) - skipped (calc.at:1524) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... - - skipped (calc.at:1523) -./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) - - -587. calc.at:1532: testing Calculator D api.push-pull=both ... -./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1489: cat stderr -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input - | 1//2 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) +Reducing stack 0 by rule 6 (line 93): + $1 
= nterm exp (2222) $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp 
(1111) Entering state 29 +Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 +Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: cat stderr +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Entering state 8 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +stderr: +./calc.at:1487: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + 
$1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting 
token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) + | (* *) + (*) + (*) +./calc.at:1487: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Reducing stack 0 
by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 
80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1486: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1487: cat stderr +./calc.at:1489: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token 
is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; + | 1//2 +input: +./calc.at:1489: $PREPARSER ./calc input +571. calc.at:1504: testing Calculator lalr1.d ... + | 1 + 2 * 3 + !+ ++ +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -189687,15 +208771,35 @@ }eg ' expout || exit 77 stderr: -./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189703,11 +208807,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -189715,65 +208819,81 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: cat stderr +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () + | 1 + 2 * 3 + !- ++ +./calc.at:1487: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189781,11 +208901,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -189793,845 +208913,980 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 
11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: cat stderr +./calc.at:1489: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1487: $PREPARSER ./calc input +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1489: $PREPARSER ./calc input +571. 
calc.at:1504: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +stderr: + skipped (calc.at:1504) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = 
token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - 
$1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 +Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1486: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +input: +./calc.at:1487: cat stderr + + | 1 + 2 * 3 + !+ ++ +./calc.at:1486: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 28 +Entering state 29 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) -Entering state 28 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' 
() -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering 
state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) -Entering state 32 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) - $2 = token '^' () + $2 = token '*' () $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: cat stderr +stderr: +input: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -584. calc.at:1525: input: + | 1 + 2 * 3 + !- ++ +./calc.at:1486: $PREPARSER ./calc input +stderr: input: - | 1 2 - skipped (calc.at:1525) -./calc.at:1482: $PREPARSER ./calc input - | error + | 1 = 2 = 3 ./calc.at:1489: $PREPARSER ./calc input -587. calc.at:1532: stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) - skipped (calc.at:1532) -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -589. calc.at:1544: testing Calculator Java ... -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -Starting parse -Entering state 0 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -586. calc.at:1531: - skipped (calc.at:1531) -590. calc.at:1545: testing Calculator Java parse.error=custom ... -./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -591. calc.at:1546: testing Calculator Java parse.error=detailed ... 
- -./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -585. calc.at:1530: 592. calc.at:1547: testing Calculator Java parse.error=verbose ... -./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - skipped (calc.at:1530) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr - -./calc.at:1482: cat stderr -input: - | 1 = 2 = 3 -input: - | 1//2 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr stderr: +input: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190639,11 +209894,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -190653,58 +209908,83 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number 
(2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190712,11 +209992,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -190726,8 +210006,79 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () -588. 
calc.at:1533: skipped (calc.at:1533) -./calc.at:1482: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190737,7 +210088,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 
+Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +572. calc.at:1509: testing Calculator D ... +./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1486: cat stderr ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -190748,30 +210170,303 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -591. calc.at:1546: ./calc.at:1482: cat stderr - skipped (calc.at:1546) -./calc.at:1489: cat stderr -589. calc.at:1544: skipped (calc.at:1544) +./calc.at:1487: cat stderr input: - - | error input: -./calc.at:1482: $PREPARSER ./calc input - | - | +1 -./calc.at:1489: $PREPARSER ./calc input - + | (#) + (#) = 2222 + | (1 + # + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: cat stderr stderr: -593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 
+Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | + | +1 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input stderr: +572. calc.at:1509: stderr: + skipped (calc.at:1509) Starting parse Entering state 0 Reading a token @@ -190792,15 +210487,91 @@ Error: popping nterm input () Cleanup: discarding lookahead token '+' () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -590. calc.at:1545: stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... 
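The paired traces on the '-' and '+' sides of this hunk walk through the same error-recovery sequence: after the lexer reports "syntax error: invalid character: '#'", the parser shifts an error token into state 11, discards the tokens that follow ("Error: discarding token ..."), and on the closing ')' reduces by rule 14 to an exp carrying the sentinel value 1111, which lets the surrounding expression (1111 + 1111 = 2222) be evaluated and checked as usual. A sketch of the grammar rules these traces exercise appears further below.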
+Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + +stderr: Starting parse Entering state 0 Reading a token @@ -190820,16 +210591,7 @@ syntax error, unexpected '+' Error: popping nterm input () Cleanup: discarding lookahead token '+' () - skipped (calc.at:1545) -592. calc.at:1547: ./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - skipped (calc.at:1547) -596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... -./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... 
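Each of the Calculator Java groups (589-599) is introduced with the same pinned invocation, COLUMNS=1000 NO_TERM_HYPERLINKS=1 bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y: the environment settings together with --color=no and -fno-caret apparently keep bison's diagnostics plain, unwrapped and free of terminal escape sequences so they can be compared textually, -Wno-deprecated silences deprecation warnings from the test grammar, and -o names the generated Java source. Like the Calculator D test at 572, most of these Java variants are then reported as skipped, presumably because the corresponding toolchain is not available in the build environment.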
- -./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190839,6 +210601,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1487: cat stderr ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -190849,12 +210612,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +./calc.at:1486: cat stderr +input: + | (1 + 1) / (1 - 1) ./calc.at:1489: cat stderr +./calc.at:1487: $PREPARSER ./calc input input: - | 1 = 2 = 3 ./calc.at:1489: $PREPARSER ./calc /dev/null -./calc.at:1482: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input stderr: stderr: Starting parse @@ -190862,75 +210628,404 @@ Reading a token Now at end of input. syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () +Cleanup: discarding lookahead token end of input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) 
+error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token Now at end of input. 
syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () +Cleanup: discarding lookahead token end of input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -190941,7 +211036,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: cat stderr +573. calc.at:1510: testing Calculator D %locations ... +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190951,23 +211048,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... -./calc.at:1489: cat stderr -./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... 
-./calc.at:1482: cat stderr input: -./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1487: cat stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1489: $PREPARSER ./calc input -593. calc.at:1548: input: - skipped (calc.at:1548) - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -598. calc.at:1554: testing Calculator Java api.push-pull=both ... -./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: +./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +562. calc.at:1487: ok +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 @@ -190998,11 +211085,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -191010,11 +211097,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -191029,11 +211116,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -191119,11 +211206,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -191131,11 +211218,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 30 Reading a token @@ -191182,11 +211269,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -191212,56 +211299,18 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input stderr: - Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -594. calc.at:1549: Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -191288,11 +211337,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -191300,11 +211349,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -191319,11 +211368,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -191409,11 +211458,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -191421,11 +211470,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 30 Reading a token @@ -191472,11 +211521,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 
1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -191502,61 +211551,11 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -595. calc.at:1550: skipped (calc.at:1549) -596. calc.at:1551: skipped (calc.at:1550) - skipped (calc.at:1551) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... - -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... -./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: cat stderr -./calc.at:1482: cat stderr -./calc.at:1482: $PREPARSER ./calc /dev/null -input: - | (!!) + (1 2) = 1 -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () stderr: Starting parse Entering state 0 @@ -191565,52 +211564,18 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Error: discarding token '+' () Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -191622,40 +211587,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 +Entering state 8 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -191665,11 +211622,11 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -191678,52 +211635,18 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Error: discarding token '+' () Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -191735,40 +211658,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -191778,11 +211693,11 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -191792,7 +211707,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1489: cat stderr +573. calc.at:1510: skipped (calc.at:1510) +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -191802,28 +211719,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1489: cat stderr -input: -597. calc.at:1552: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 + + | (!!) 
+ (1 2) = 1 ./calc.at:1489: $PREPARSER ./calc input - skipped (calc.at:1552) stderr: +stdout: +./calc.at:1486: cat stderr +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + stderr: -599. calc.at:1555: Starting parse +Starting parse Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 +Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 @@ -191842,61 +211776,23 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -191908,37 +211804,77 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token 
'\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +stderr: +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -191946,15 +211882,8 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -191963,45 +211892,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -192017,36 +211922,36 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" 
(1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -192056,12 +211961,12 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -598. calc.at:1554: +stderr: +input: Starting parse Entering state 0 Reading a token @@ -192069,21 +211974,32 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -192097,29 +212013,104 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... +./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error, unexpected number +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -192131,40 +212122,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -192174,137 +212157,886 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1555) - skipped (calc.at:1554) +./calc.at:1489: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 
69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) 
+Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token 
-Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' 
(7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' 
(10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 
= token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ 
= nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -192315,15 +213047,8 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -192332,45 +213057,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -192386,36 +213087,36 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -192425,11 +213126,16 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +575. calc.at:1514: testing Calculator D %debug ... +./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -192473,21 +213179,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) +Next token is token number (2) syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -192512,11 +213218,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -192542,14 +213248,1101 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () - - -602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... 
-./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp 
(2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' 
(4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 
2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" 
(9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token 
'\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" 
(13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1491: $PREPARSER ./calc input +stderr: +574. 
calc.at:1512: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -192560,7 +214353,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1512) +./calc.at:1489: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192570,16 +214383,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -input: + | (* *) + (*) + (*) ./calc.at:1489: $PREPARSER ./calc input -601. calc.at:1557: 600. calc.at:1556: 604. torture.at:132: testing Big triangle ... -./calc.at:1482: cat stderr -./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr stderr: - skipped (calc.at:1557) - skipped (calc.at:1556) +./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -192691,17 +214510,13 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (!!) + (1 2) = 1 -605. torture.at:216: testing Big horizontal ... -./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 stderr: -./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -192813,36 +214628,482 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () + | 1//2 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +575. calc.at:1514: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1514) +561. calc.at:1486: ok +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) -603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1489: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: -./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | 1 + 2 * 3 + !+ ++ +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1491: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1489: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token Next token is token '!' () Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () - $2 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1491: cat stderr +576. calc.at:1516: testing Calculator D parse.error=custom ... +./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1491: $PREPARSER ./calc input +stderr: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (#) + (#) = 2222 +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +577. calc.at:1517: testing Calculator D %locations parse.error=custom ... 
+./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -192860,21 +215121,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -192899,20 +215151,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -192929,12 +215180,25 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... stderr: +./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y Starting parse Entering state 0 Reading a token @@ -192942,20 +215206,16 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -192973,21 +215233,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -193012,20 +215263,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -193042,10 +215292,14 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () +input: + | + | +1 +./calc.at:1491: $PREPARSER ./calc input ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -193056,8 +215310,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +576. 
calc.at:1516: ./calc.at:1489: cat stderr + skipped (calc.at:1516) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +input: + | (1 + #) = 1111 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193067,1437 +215366,461 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -stdout: -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. */ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#define MAX 1000 -static int yylex (void); -#include +stderr: -/* !POSIX */ static void yyerror (const char *msg); -%} +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1491: $PREPARSER ./calc /dev/null +stderr: +577. calc.at:1517: Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) + skipped (calc.at:1517) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 -%token - t1 1 "1" - t2 2 "2" - t3 3 "3" - t4 4 "4" - t5 5 "5" - t6 6 "6" - t7 7 "7" - t8 8 "8" - t9 9 "9" - t10 10 "10" - t11 11 "11" - t12 12 "12" - t13 13 "13" - t14 14 "14" - t15 15 "15" - t16 16 "16" - t17 17 "17" - t18 18 "18" - t19 19 "19" - t20 20 "20" - t21 21 "21" - t22 22 "22" - t23 23 "23" - t24 24 "24" - t25 25 "25" - t26 26 "26" - t27 27 "27" - t28 28 "28" - t29 29 "29" - t30 30 "30" - t31 31 "31" - t32 32 "32" - t33 33 "33" - t34 34 "34" - t35 35 "35" - t36 36 "36" - t37 37 "37" - t38 38 "38" - t39 39 "39" - t40 40 "40" - t41 41 "41" - t42 42 "42" - t43 43 "43" - t44 44 "44" - t45 45 "45" - t46 46 "46" - t47 47 "47" - t48 48 "48" - t49 49 "49" - t50 50 "50" - t51 51 "51" - t52 52 "52" - t53 53 "53" - t54 54 "54" - t55 55 "55" - t56 56 "56" - t57 57 "57" - t58 58 "58" - t59 59 "59" - t60 60 "60" - t61 61 "61" - t62 62 "62" - t63 63 "63" - t64 64 "64" - t65 65 "65" - t66 66 "66" - t67 67 "67" - t68 68 "68" - t69 69 "69" - t70 70 "70" - t71 71 "71" - t72 72 "72" - t73 73 "73" - t74 74 "74" - t75 75 "75" - t76 76 "76" - t77 77 "77" - t78 78 "78" - t79 79 "79" - t80 80 "80" - t81 81 "81" - t82 82 "82" - t83 83 "83" - t84 84 "84" - t85 85 "85" - t86 86 "86" - t87 87 "87" - t88 88 "88" - t89 89 "89" - t90 90 "90" - t91 91 "91" - t92 92 "92" - t93 93 "93" - t94 94 "94" - t95 95 "95" - t96 96 "96" - t97 97 "97" - t98 98 "98" - t99 99 "99" - t100 100 "100" - t101 101 "101" - t102 102 "102" - t103 103 "103" - t104 104 "104" - t105 105 "105" - t106 106 "106" - t107 107 "107" - t108 108 "108" - t109 109 "109" - t110 110 "110" - t111 111 "111" - t112 112 "112" - t113 113 "113" - t114 114 "114" - t115 115 "115" - t116 116 "116" - t117 117 "117" - t118 118 "118" - t119 119 "119" - t120 120 "120" - t121 121 "121" - t122 122 "122" - t123 123 "123" - t124 124 "124" - t125 125 "125" - t126 126 "126" - t127 127 "127" - t128 128 "128" - t129 129 "129" - t130 130 "130" - t131 131 "131" - t132 132 "132" - t133 133 "133" - t134 134 "134" - t135 135 "135" - t136 136 "136" - t137 137 "137" - t138 138 "138" - t139 139 "139" - t140 140 "140" - t141 141 "141" - t142 142 "142" - t143 143 "143" - t144 144 "144" - t145 145 "145" - t146 146 "146" - t147 147 "147" - t148 148 "148" - t149 149 "149" - t150 150 "150" - t151 151 "151" - t152 152 "152" - t153 153 "153" - t154 154 "154" - t155 155 "155" - t156 156 "156" - t157 157 "157" - t158 158 "158" - t159 159 "159" - t160 160 "160" - t161 161 "161" - t162 162 "162" - t163 163 "163" - t164 164 "164" - t165 165 "165" - t166 166 "166" - t167 167 "167" - t168 168 "168" - t169 169 "169" - t170 170 "170" - t171 171 "171" - t172 172 "172" - t173 173 "173" - t174 174 "174" - t175 175 "175" - t176 176 "176" - t177 177 "177" - t178 178 "178" - t179 179 "179" - t180 180 "180" - t181 181 "181" - t182 182 "182" - t183 183 "183" - t184 184 "184" - t185 185 "185" - t186 186 "186" - t187 187 "187" - t188 188 "188" - t189 189 "189" - t190 190 "190" - t191 191 "191" - t192 192 "192" - t193 193 "193" - t194 194 "194" - t195 195 "195" - t196 196 "196" - t197 197 "197" - t198 198 "198" - t199 199 "199" - t200 200 "200" - t201 201 "201" - t202 202 "202" - t203 203 "203" - t204 204 "204" - t205 205 "205" - t206 206 "206" - t207 207 "207" - t208 208 "208" - t209 209 "209" - t210 210 "210" - t211 211 "211" - t212 212 "212" - t213 213 "213" - t214 214 "214" - t215 215 "215" - t216 216 "216" - t217 217 "217" - t218 218 "218" - t219 219 "219" - t220 
220 "220" - t221 221 "221" - t222 222 "222" - t223 223 "223" - t224 224 "224" - t225 225 "225" - t226 226 "226" - t227 227 "227" - t228 228 "228" - t229 229 "229" - t230 230 "230" - t231 231 "231" - t232 232 "232" - t233 233 "233" - t234 234 "234" - t235 235 "235" - t236 236 "236" - t237 237 "237" - t238 238 "238" - t239 239 "239" - t240 240 "240" - t241 241 "241" - t242 242 "242" - t243 243 "243" - t244 244 "244" - t245 245 "245" - t246 246 "246" - t247 247 "247" - t248 248 "248" - t249 249 "249" - t250 250 "250" - t251 251 "251" - t252 252 "252" - t253 253 "253" - t254 254 "254" - t255 255 "255" - t256 256 "256" - t257 257 "257" - t258 258 "258" - t259 259 "259" - t260 260 "260" - t261 261 "261" - t262 262 "262" - t263 263 "263" - t264 264 "264" - t265 265 "265" - t266 266 "266" - t267 267 "267" - t268 268 "268" - t269 269 "269" - t270 270 "270" - t271 271 "271" - t272 272 "272" - t273 273 "273" - t274 274 "274" - t275 275 "275" - t276 276 "276" - t277 277 "277" - t278 278 "278" - t279 279 "279" - t280 280 "280" - t281 281 "281" - t282 282 "282" - t283 283 "283" - t284 284 "284" - t285 285 "285" - t286 286 "286" - t287 287 "287" - t288 288 "288" - t289 289 "289" - t290 290 "290" - t291 291 "291" - t292 292 "292" - t293 293 "293" - t294 294 "294" - t295 295 "295" - t296 296 "296" - t297 297 "297" - t298 298 "298" - t299 299 "299" - t300 300 "300" - t301 301 "301" - t302 302 "302" - t303 303 "303" - t304 304 "304" - t305 305 "305" - t306 306 "306" - t307 307 "307" - t308 308 "308" - t309 309 "309" - t310 310 "310" - t311 311 "311" - t312 312 "312" - t313 313 "313" - t314 314 "314" - t315 315 "315" - t316 316 "316" - t317 317 "317" - t318 318 "318" - t319 319 "319" - t320 320 "320" - t321 321 "321" - t322 322 "322" - t323 323 "323" - t324 324 "324" - t325 325 "325" - t326 326 "326" - t327 327 "327" - t328 328 "328" - t329 329 "329" - t330 330 "330" - t331 331 "331" - t332 332 "332" - t333 333 "333" - t334 334 "334" - t335 335 "335" - t336 336 "336" - t337 337 "337" - t338 338 "338" - t339 339 "339" - t340 340 "340" - t341 341 "341" - t342 342 "342" - t343 343 "343" - t344 344 "344" - t345 345 "345" - t346 346 "346" - t347 347 "347" - t348 348 "348" - t349 349 "349" - t350 350 "350" - t351 351 "351" - t352 352 "352" - t353 353 "353" - t354 354 "354" - t355 355 "355" - t356 356 "356" - t357 357 "357" - t358 358 "358" - t359 359 "359" - t360 360 "360" - t361 361 "361" - t362 362 "362" - t363 363 "363" - t364 364 "364" - t365 365 "365" - t366 366 "366" - t367 367 "367" - t368 368 "368" - t369 369 "369" - t370 370 "370" - t371 371 "371" - t372 372 "372" - t373 373 "373" - t374 374 "374" - t375 375 "375" - t376 376 "376" - t377 377 "377" - t378 378 "378" - t379 379 "379" - t380 380 "380" - t381 381 "381" - t382 382 "382" - t383 383 "383" - t384 384 "384" - t385 385 "385" - t386 386 "386" - t387 387 "387" - t388 388 "388" - t389 389 "389" - t390 390 "390" - t391 391 "391" - t392 392 "392" - t393 393 "393" - t394 394 "394" - t395 395 "395" - t396 396 "396" - t397 397 "397" - t398 398 "398" - t399 399 "399" - t400 400 "400" - t401 401 "401" - t402 402 "402" - t403 403 "403" - t404 404 "404" - t405 405 "405" - t406 406 "406" - t407 407 "407" - t408 408 "408" - t409 409 "409" - t410 410 "410" - t411 411 "411" - t412 412 "412" - t413 413 "413" - t414 414 "414" - t415 415 "415" - t416 416 "416" - t417 417 "417" - t418 418 "418" - t419 419 "419" - t420 420 "420" - t421 421 "421" - t422 422 "422" - t423 423 "423" - t424 424 "424" - t425 425 "425" - t426 426 "426" - t427 427 "427" - t428 428 "428" - t429 
429 "429" - t430 430 "430" - t431 431 "431" - t432 432 "432" - t433 433 "433" - t434 434 "434" - t435 435 "435" - t436 436 "436" - t437 437 "437" - t438 438 "438" - t439 439 "439" - t440 440 "440" - t441 441 "441" - t442 442 "442" - t443 443 "443" - t444 444 "444" - t445 445 "445" - t446 446 "446" - t447 447 "447" - t448 448 "448" - t449 449 "449" - t450 450 "450" - t451 451 "451" - t452 452 "452" - t453 453 "453" - t454 454 "454" - t455 455 "455" - t456 456 "456" - t457 457 "457" - t458 458 "458" - t459 459 "459" - t460 460 "460" - t461 461 "461" - t462 462 "462" - t463 463 "463" - t464 464 "464" - t465 465 "465" - t466 466 "466" - t467 467 "467" - t468 468 "468" - t469 469 "469" - t470 470 "470" - t471 471 "471" - t472 472 "472" - t473 473 "473" - t474 474 "474" - t475 475 "475" - t476 476 "476" - t477 477 "477" - t478 478 "478" - t479 479 "479" - t480 480 "480" - t481 481 "481" - t482 482 "482" - t483 483 "483" - t484 484 "484" - t485 485 "485" - t486 486 "486" - t487 487 "487" - t488 488 "488" - t489 489 "489" - t490 490 "490" - t491 491 "491" - t492 492 "492" - t493 493 "493" - t494 494 "494" - t495 495 "495" - t496 496 "496" - t497 497 "497" - t498 498 "498" - t499 499 "499" - t500 500 "500" - t501 501 "501" - t502 502 "502" - t503 503 "503" - t504 504 "504" - t505 505 "505" - t506 506 "506" - t507 507 "507" - t508 508 "508" - t509 509 "509" - t510 510 "510" - t511 511 "511" - t512 512 "512" - t513 513 "513" - t514 514 "514" - t515 515 "515" - t516 516 "516" - t517 517 "517" - t518 518 "518" - t519 519 "519" - t520 520 "520" - t521 521 "521" - t522 522 "522" - t523 523 "523" - t524 524 "524" - t525 525 "525" - t526 526 "526" - t527 527 "527" - t528 528 "528" - t529 529 "529" - t530 530 "530" - t531 531 "531" - t532 532 "532" - t533 533 "533" - t534 534 "534" - t535 535 "535" - t536 536 "536" - t537 537 "537" - t538 538 "538" - t539 539 "539" - t540 540 "540" - t541 541 "541" - t542 542 "542" - t543 543 "543" - t544 544 "544" - t545 545 "545" - t546 546 "546" - t547 547 "547" - t548 548 "548" - t549 549 "549" - t550 550 "550" - t551 551 "551" - t552 552 "552" - t553 553 "553" - t554 554 "554" - t555 555 "555" - t556 556 "556" - t557 557 "557" - t558 558 "558" - t559 559 "559" - t560 560 "560" - t561 561 "561" - t562 562 "562" - t563 563 "563" - t564 564 "564" - t565 565 "565" - t566 566 "566" - t567 567 "567" - t568 568 "568" - t569 569 "569" - t570 570 "570" - t571 571 "571" - t572 572 "572" - t573 573 "573" - t574 574 "574" - t575 575 "575" - t576 576 "576" - t577 577 "577" - t578 578 "578" - t579 579 "579" - t580 580 "580" - t581 581 "581" - t582 582 "582" - t583 583 "583" - t584 584 "584" - t585 585 "585" - t586 586 "586" - t587 587 "587" - t588 588 "588" - t589 589 "589" - t590 590 "590" - t591 591 "591" - t592 592 "592" - t593 593 "593" - t594 594 "594" - t595 595 "595" - t596 596 "596" - t597 597 "597" - t598 598 "598" - t599 599 "599" - t600 600 "600" - t601 601 "601" - t602 602 "602" - t603 603 "603" - t604 604 "604" - t605 605 "605" - t606 606 "606" - t607 607 "607" - t608 608 "608" - t609 609 "609" - t610 610 "610" - t611 611 "611" - t612 612 "612" - t613 613 "613" - t614 614 "614" - t615 615 "615" - t616 616 "616" - t617 617 "617" - t618 618 "618" - t619 619 "619" - t620 620 "620" - t621 621 "621" - t622 622 "622" - t623 623 "623" - t624 624 "624" - t625 625 "625" - t626 626 "626" - t627 627 "627" - t628 628 "628" - t629 629 "629" - t630 630 "630" - t631 631 "631" - t632 632 "632" - t633 633 "633" - t634 634 "634" - t635 635 "635" - t636 636 "636" - t637 637 "637" - t638 
638 "638" - t639 639 "639" - t640 640 "640" - t641 641 "641" - t642 642 "642" - t643 643 "643" - t644 644 "644" - t645 645 "645" - t646 646 "646" - t647 647 "647" - t648 648 "648" - t649 649 "649" - t650 650 "650" - t651 651 "651" - t652 652 "652" - t653 653 "653" - t654 654 "654" - t655 655 "655" - t656 656 "656" - t657 657 "657" - t658 658 "658" - t659 659 "659" - t660 660 "660" - t661 661 "661" - t662 662 "662" - t663 663 "663" - t664 664 "664" - t665 665 "665" - t666 666 "666" - t667 667 "667" - t668 668 "668" - t669 669 "669" - t670 670 "670" - t671 671 "671" - t672 672 "672" - t673 673 "673" - t674 674 "674" - t675 675 "675" - t676 676 "676" - t677 677 "677" - t678 678 "678" - t679 679 "679" - t680 680 "680" - t681 681 "681" - t682 682 "682" - t683 683 "683" - t684 684 "684" - t685 685 "685" - t686 686 "686" - t687 687 "687" - t688 688 "688" - t689 689 "689" - t690 690 "690" - t691 691 "691" - t692 692 "692" - t693 693 "693" - t694 694 "694" - t695 695 "695" - t696 696 "696" - t697 697 "697" - t698 698 "698" - t699 699 "699" - t700 700 "700" - t701 701 "701" - t702 702 "702" - t703 703 "703" - t704 704 "704" - t705 705 "705" - t706 706 "706" - t707 707 "707" - t708 708 "708" - t709 709 "709" - t710 710 "710" - t711 711 "711" - t712 712 "712" - t713 713 "713" - t714 714 "714" - t715 715 "715" - t716 716 "716" - t717 717 "717" - t718 718 "718" - t719 719 "719" - t720 720 "720" - t721 721 "721" - t722 722 "722" - t723 723 "723" - t724 724 "724" - t725 725 "725" - t726 726 "726" - t727 727 "727" - t728 728 "728" - t729 729 "729" - t730 730 "730" - t731 731 "731" - t732 732 "732" - t733 733 "733" - t734 734 "734" - t735 735 "735" - t736 736 "736" - t737 737 "737" - t738 738 "738" - t739 739 "739" - t740 740 "740" - t741 741 "741" - t742 742 "742" - t743 743 "743" - t744 744 "744" - t745 745 "745" - t746 746 "746" - t747 747 "747" - t748 748 "748" - t749 749 "749" - t750 750 "750" - t751 751 "751" - t752 752 "752" - t753 753 "753" - t754 754 "754" - t755 755 "755" - t756 756 "756" - t757 757 "757" - t758 758 "758" - t759 759 "759" - t760 760 "760" - t761 761 "761" - t762 762 "762" - t763 763 "763" - t764 764 "764" - t765 765 "765" - t766 766 "766" - t767 767 "767" - t768 768 "768" - t769 769 "769" - t770 770 "770" - t771 771 "771" - t772 772 "772" - t773 773 "773" - t774 774 "774" - t775 775 "775" - t776 776 "776" - t777 777 "777" - t778 778 "778" - t779 779 "779" - t780 780 "780" - t781 781 "781" - t782 782 "782" - t783 783 "783" - t784 784 "784" - t785 785 "785" - t786 786 "786" - t787 787 "787" - t788 788 "788" - t789 789 "789" - t790 790 "790" - t791 791 "791" - t792 792 "792" - t793 793 "793" - t794 794 "794" - t795 795 "795" - t796 796 "796" - t797 797 "797" - t798 798 "798" - t799 799 "799" - t800 800 "800" - t801 801 "801" - t802 802 "802" - t803 803 "803" - t804 804 "804" - t805 805 "805" - t806 806 "806" - t807 807 "807" - t808 808 "808" - t809 809 "809" - t810 810 "810" - t811 811 "811" - t812 812 "812" - t813 813 "813" - t814 814 "814" - t815 815 "815" - t816 816 "816" - t817 817 "817" - t818 818 "818" - t819 819 "819" - t820 820 "820" - t821 821 "821" - t822 822 "822" - t823 823 "823" - t824 824 "824" - t825 825 "825" - t826 826 "826" - t827 827 "827" - t828 828 "828" - t829 829 "829" - t830 830 "830" - t831 831 "831" - t832 832 "832" - t833 833 "833" - t834 834 "834" - t835 835 "835" - t836 836 "836" - t837 837 "837" - t838 838 "838" - t839 839 "839" - t840 840 "840" - t841 841 "841" - t842 842 "842" - t843 843 "843" - t844 844 "844" - t845 845 "845" - t846 846 "846" - t847 
847 "847" - t848 848 "848" - t849 849 "849" - t850 850 "850" - t851 851 "851" - t852 852 "852" - t853 853 "853" - t854 854 "854" - t855 855 "855" - t856 856 "856" - t857 857 "857" - t858 858 "858" - t859 859 "859" - t860 860 "860" - t861 861 "861" - t862 862 "862" - t863 863 "863" - t864 864 "864" - t865 865 "865" - t866 866 "866" - t867 867 "867" - t868 868 "868" - t869 869 "869" - t870 870 "870" - t871 871 "871" - t872 872 "872" - t873 873 "873" - t874 874 "874" - t875 875 "875" - t876 876 "876" - t877 877 "877" - t878 878 "878" - t879 879 "879" - t880 880 "880" - t881 881 "881" - t882 882 "882" - t883 883 "883" - t884 884 "884" - t885 885 "885" - t886 886 "886" - t887 887 "887" - t888 888 "888" - t889 889 "889" - t890 890 "890" - t891 891 "891" - t892 892 "892" - t893 893 "893" - t894 894 "894" - t895 895 "895" - t896 896 "896" - t897 897 "897" - t898 898 "898" - t899 899 "899" - t900 900 "900" - t901 901 "901" - t902 902 "902" - t903 903 "903" - t904 904 "904" - t905 905 "905" - t906 906 "906" - t907 907 "907" - t908 908 "908" - t909 909 "909" - t910 910 "910" - t911 911 "911" - t912 912 "912" - t913 913 "913" - t914 914 "914" - t915 915 "915" - t916 916 "916" - t917 917 "917" - t918 918 "918" - t919 919 "919" - t920 920 "920" - t921 921 "921" - t922 922 "922" - t923 923 "923" - t924 924 "924" - t925 925 "925" - t926 926 "926" - t927 927 "927" - t928 928 "928" - t929 929 "929" - t930 930 "930" - t931 931 "931" - t932 932 "932" - t933 933 "933" - t934 934 "934" - t935 935 "935" - t936 936 "936" - t937 937 "937" - t938 938 "938" - t939 939 "939" - t940 940 "940" - t941 941 "941" - t942 942 "942" - t943 943 "943" - t944 944 "944" - t945 945 "945" - t946 946 "946" - t947 947 "947" - t948 948 "948" - t949 949 "949" - t950 950 "950" - t951 951 "951" - t952 952 "952" - t953 953 "953" - t954 954 "954" - t955 955 "955" - t956 956 "956" - t957 957 "957" - t958 958 "958" - t959 959 "959" - t960 960 "960" - t961 961 "961" - t962 962 "962" - t963 963 "963" - t964 964 "964" - t965 965 "965" - t966 966 "966" - t967 967 "967" - t968 968 "968" - t969 969 "969" - t970 970 "970" - t971 971 "971" - t972 972 "972" - t973 973 "973" - t974 974 "974" - t975 975 "975" - t976 976 "976" - t977 977 "977" - t978 978 "978" - t979 979 "979" - t980 980 "980" - t981 981 "981" - t982 982 "982" - t983 983 "983" - t984 984 "984" - t985 985 "985" - t986 986 "986" - t987 987 "987" - t988 988 "988" - t989 989 "989" - t990 990 "990" - t991 991 "991" - t992 992 "992" - t993 993 "993" - t994 994 "994" - t995 995 "995" - t996 996 "996" - t997 997 "997" - t998 998 "998" - t999 999 "999" - t1000 1000 "1000" - -%% -exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" 
"169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208" - "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220" - "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232" - "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244" - "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256" - "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268" - "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280" - "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292" - "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304" - "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316" - "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328" - "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340" - "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352" - "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364" - "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376" - "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388" - "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400" - "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412" - "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424" - "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436" - "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448" - "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460" - "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472" - "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484" - "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496" - "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508" - "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520" - "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532" - "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544" - "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556" - "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568" - "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580" - "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592" - "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604" - "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616" - "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628" - "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640" - "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652" - "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664" - "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676" - "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688" - "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" "699" "700" - "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712" - "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724" - "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" "736" - "737" "738" "739" "740" "741" "742" "743" "744" 
"745" "746" "747" "748" - "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760" - "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772" - "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784" - "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796" - "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808" - "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820" - "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832" - "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844" - "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856" - "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868" - "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880" - "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892" - "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904" - "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916" - "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928" - "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940" - "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952" - "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964" - "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976" - "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988" - "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000" - ; -%% -#include - - - - -/* A C error reporting function. */ -/* !POSIX */ static -void yyerror (const char *msg) -{ - fprintf (stderr, "%s\n", msg); -} -static int -yylex (void) -{ - static int counter = 1; - if (counter <= MAX) - return counter++; - assert (counter++ == MAX + 1); - return 0; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +./calc.at:1491: cat stderr +578. calc.at:1518: skipped (calc.at:1518) +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1491: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1482: cat stderr -./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input -606. torture.at:270: testing State number type: 128 states ... 
-./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2026-01-31 08:18:11.388666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found - | (- *) + (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -602. calc.at:1560: 606. torture.at:270: 608. torture.at:272: testing State number type: 256 states ... -./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 - skipped (calc.at:1560) -607. torture.at:271: testing State number type: 129 states ... 
-./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '*' (1.39: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - skipped (torture.at:270) -stderr: ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2026-01-31 08:18:11.396666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -194505,21 +215828,18 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -194533,29 +215853,64 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -194567,40 +215922,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -194610,136 +215957,259 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2026-01-31 08:18:11.396666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found -607. torture.at:271: 608. 
torture.at:272: stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token -Next token is token ')' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - - skipped (torture.at:271) - skipped (torture.at:272) - +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -194750,10 +216220,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -609. torture.at:273: testing State number type: 257 states ... 
-./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -194763,32 +216230,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2026-01-31 08:18:11.412666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found -609. torture.at:273: skipped (torture.at:273) -610. torture.at:274: testing State number type: 32768 states ... -./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 -603. calc.at:1561: --- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2026-01-31 08:18:11.424666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +./calc.at:1489: cat stderr +./calc.at:1491: cat stderr input: - skipped (calc.at:1561) -610. torture.at:274: ./calc.at:1482: cat stderr - | (#) + (#) = 2222 - skipped (torture.at:274) -./calc.at:1489: $PREPARSER ./calc input - input: - - | (* *) + (*) + (*) -./calc.at:1482: $PREPARSER ./calc input - -stderr: + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +579. calc.at:1519: testing Calculator D %locations parse.error=simple ... +./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + | (!!) 
+ (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -194797,40 +216249,33 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -194841,39 +216286,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -194883,129 +216321,124 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 Reading a token -Next token is token ')' () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' (1.17-2.0: ) 
+Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -195013,42 +216446,32 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -195060,54 +216483,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () 
-Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -195117,108 +216518,126 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () +580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... +./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm 
line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -195239,15 +216658,18 @@ }eg ' expout || exit 77 ./calc.at:1489: cat stderr -./calc.at:1482: cat stderr +./calc.at:1491: cat stderr +579. calc.at:1519: skipped (calc.at:1519) +581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... +./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y input: - | (1 + #) = 1111 + | (1 + 1) / (1 - 1) ./calc.at:1489: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -stderr: -stderr: + + | (- *) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +580. calc.at:1520: stderr: Starting parse Entering state 0 Reading a token @@ -195255,11 +216677,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195267,51 +216689,89 @@ Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 
28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -195321,77 +216781,130 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () + skipped (calc.at:1520) ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + stderr: Starting parse Entering state 0 @@ -195400,11 +216913,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195412,51 +216925,89 @@ Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering 
state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -195466,88 +217017,411 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token 
'=' (1.15: ) +Entering state 18 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... +./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... +./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1489: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +581. calc.at:1521: skipped (calc.at:1521) +564. calc.at:1489: + ok + +./calc.at:1491: cat stderr +584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: + | (* *) + (*) + (*) +./calc.at:1491: $PREPARSER ./calc input +585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... +./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +583. calc.at:1524: skipped (calc.at:1524) +582. calc.at:1523: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: -611. torture.at:275: testing State number type: 65536 states ... -./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1482: $PREPARSER ./calc input -612. torture.at:276: testing State number type: 65537 states ... -./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2026-01-31 08:18:11.496666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found -./calc.at:1489: "$PERL" -pi -e 'use strict; +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + skipped (calc.at:1523) + + +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +584. calc.at:1525: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + skipped (calc.at:1525) + +586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... +./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +585. calc.at:1530: 587. calc.at:1532: testing Calculator D api.push-pull=both ... +./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + skipped (calc.at:1530) + +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -195557,159 +217431,286 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -611. 
torture.at:275: --- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2026-01-31 08:18:11.496666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found +./calc.at:1491: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1491: $PREPARSER ./calc input stderr: - skipped (torture.at:275) Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: + | 1 + 2 * 3 + !- ++ +587. calc.at:1532: ./calc.at:1491: $PREPARSER ./calc input + skipped (calc.at:1532) +stderr: +586. 
calc.at:1531: Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 13 Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -612. torture.at:276: 614. torture.at:485: testing Exploding the Stack Size with Alloca ... -./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - skipped (torture.at:276) + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + skipped (calc.at:1531) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -613. torture.at:385: testing Many lookahead tokens ... 
Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 13 Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) -input: - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -195719,152 +217720,386 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +589. calc.at:1544: testing Calculator Java ... 
+./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1491: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1491: $PREPARSER ./calc input stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -615. torture.at:531: testing Exploding the Stack Size with Malloc ... Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 Reading a token -Next token is token ')' () +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm 
line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +588. calc.at:1533: skipped (calc.at:1533) +input: + | (1 + #) = 1111 +./calc.at:1491: $PREPARSER ./calc input + stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' () +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +590. calc.at:1545: testing Calculator Java parse.error=custom ... +./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +591. calc.at:1546: testing Calculator Java parse.error=detailed ... 
+./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -195874,28311 +218109,182 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1491: cat stderr +./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: + | (# + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +stderr: stdout: -%define parse.error verbose -%debug -%{ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. */ -#include - -# include -# include -# include -# define MAX 1000 -static int yylex (void); -#include +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh -/* !POSIX */ static void yyerror (const char *msg); -%} -%union -{ - int val; -}; +589. calc.at:1544: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + skipped (calc.at:1544) +input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) -%type input exp -%token token -%type n1 n2 n3 n4 n5 n6 n7 n8 n9 n10 n11 n12 n13 n14 n15 n16 n17 n18 - n19 n20 n21 n22 n23 n24 n25 n26 n27 n28 n29 n30 n31 n32 n33 n34 - n35 n36 n37 n38 n39 n40 n41 n42 n43 n44 n45 n46 n47 n48 n49 n50 - n51 n52 n53 n54 n55 n56 n57 n58 n59 n60 n61 n62 n63 n64 n65 n66 - n67 n68 n69 n70 n71 n72 n73 n74 n75 n76 n77 n78 n79 n80 n81 n82 - n83 n84 n85 n86 n87 n88 n89 n90 n91 n92 n93 n94 n95 n96 n97 n98 - n99 n100 n101 n102 n103 n104 n105 n106 n107 n108 n109 n110 n111 - n112 n113 n114 n115 n116 n117 n118 n119 n120 n121 n122 n123 - n124 n125 n126 n127 n128 n129 n130 n131 n132 n133 n134 n135 - n136 n137 n138 n139 n140 n141 n142 n143 n144 n145 n146 n147 - n148 n149 n150 n151 n152 n153 n154 n155 n156 n157 n158 n159 - n160 n161 n162 n163 n164 n165 n166 n167 n168 n169 n170 n171 - n172 n173 n174 n175 n176 n177 n178 n179 n180 n181 n182 n183 - n184 n185 n186 n187 n188 n189 n190 n191 n192 n193 n194 n195 - n196 n197 n198 n199 n200 n201 n202 n203 n204 n205 n206 n207 - n208 n209 n210 n211 n212 n213 n214 n215 n216 n217 n218 n219 - n220 n221 n222 n223 n224 n225 n226 n227 n228 n229 n230 n231 - n232 n233 n234 n235 n236 n237 n238 n239 n240 n241 n242 n243 - n244 n245 n246 n247 n248 n249 n250 n251 n252 n253 n254 n255 - n256 n257 n258 n259 n260 n261 n262 n263 n264 n265 n266 n267 - n268 n269 n270 n271 n272 n273 n274 n275 n276 n277 n278 n279 - n280 n281 n282 n283 n284 n285 n286 n287 n288 n289 n290 n291 - n292 n293 n294 n295 n296 n297 n298 n299 n300 n301 n302 n303 - n304 n305 n306 n307 n308 n309 n310 n311 n312 n313 n314 n315 - n316 n317 n318 n319 n320 n321 n322 n323 n324 n325 n326 n327 - n328 n329 n330 n331 n332 n333 n334 n335 n336 n337 n338 n339 - n340 n341 n342 n343 n344 n345 n346 n347 n348 n349 n350 n351 - n352 n353 n354 n355 n356 n357 n358 n359 n360 n361 n362 n363 - n364 n365 n366 n367 n368 n369 n370 n371 n372 n373 n374 n375 - n376 n377 n378 n379 n380 n381 n382 n383 n384 n385 n386 n387 - n388 n389 n390 n391 n392 n393 n394 n395 n396 n397 n398 n399 - n400 n401 n402 n403 n404 n405 n406 n407 n408 n409 n410 n411 - n412 n413 n414 n415 n416 n417 n418 n419 n420 n421 n422 n423 - n424 n425 n426 n427 n428 n429 n430 n431 n432 n433 n434 n435 - n436 n437 n438 n439 n440 n441 n442 n443 n444 n445 n446 n447 - n448 n449 n450 n451 n452 n453 n454 n455 n456 n457 n458 n459 - n460 n461 n462 n463 n464 n465 n466 n467 n468 n469 n470 n471 - n472 n473 n474 n475 n476 n477 n478 n479 n480 n481 n482 n483 - n484 n485 n486 n487 n488 n489 n490 n491 n492 n493 n494 n495 - n496 n497 n498 n499 n500 n501 n502 n503 n504 n505 n506 n507 - n508 n509 n510 n511 n512 n513 n514 n515 n516 n517 n518 n519 - n520 n521 n522 n523 n524 n525 n526 n527 n528 n529 n530 n531 - n532 n533 n534 n535 n536 n537 n538 n539 n540 n541 n542 n543 - n544 n545 n546 n547 n548 n549 n550 n551 n552 n553 n554 n555 - n556 n557 n558 n559 n560 n561 n562 n563 n564 n565 n566 n567 - n568 n569 n570 n571 n572 n573 n574 n575 n576 n577 n578 n579 - n580 n581 n582 n583 n584 n585 n586 n587 n588 n589 n590 n591 - n592 n593 n594 n595 n596 n597 n598 n599 n600 n601 n602 n603 - n604 n605 n606 n607 n608 n609 n610 n611 n612 n613 n614 n615 - n616 n617 n618 n619 n620 n621 n622 n623 n624 n625 n626 n627 - n628 n629 n630 n631 n632 n633 n634 n635 n636 n637 n638 n639 - n640 n641 n642 n643 n644 n645 n646 n647 n648 n649 n650 n651 - n652 n653 n654 n655 n656 n657 n658 n659 n660 n661 n662 n663 - n664 n665 n666 n667 n668 n669 n670 n671 n672 n673 n674 
n675 - n676 n677 n678 n679 n680 n681 n682 n683 n684 n685 n686 n687 - n688 n689 n690 n691 n692 n693 n694 n695 n696 n697 n698 n699 - n700 n701 n702 n703 n704 n705 n706 n707 n708 n709 n710 n711 - n712 n713 n714 n715 n716 n717 n718 n719 n720 n721 n722 n723 - n724 n725 n726 n727 n728 n729 n730 n731 n732 n733 n734 n735 - n736 n737 n738 n739 n740 n741 n742 n743 n744 n745 n746 n747 - n748 n749 n750 n751 n752 n753 n754 n755 n756 n757 n758 n759 - n760 n761 n762 n763 n764 n765 n766 n767 n768 n769 n770 n771 - n772 n773 n774 n775 n776 n777 n778 n779 n780 n781 n782 n783 - n784 n785 n786 n787 n788 n789 n790 n791 n792 n793 n794 n795 - n796 n797 n798 n799 n800 n801 n802 n803 n804 n805 n806 n807 - n808 n809 n810 n811 n812 n813 n814 n815 n816 n817 n818 n819 - n820 n821 n822 n823 n824 n825 n826 n827 n828 n829 n830 n831 - n832 n833 n834 n835 n836 n837 n838 n839 n840 n841 n842 n843 - n844 n845 n846 n847 n848 n849 n850 n851 n852 n853 n854 n855 - n856 n857 n858 n859 n860 n861 n862 n863 n864 n865 n866 n867 - n868 n869 n870 n871 n872 n873 n874 n875 n876 n877 n878 n879 - n880 n881 n882 n883 n884 n885 n886 n887 n888 n889 n890 n891 - n892 n893 n894 n895 n896 n897 n898 n899 n900 n901 n902 n903 - n904 n905 n906 n907 n908 n909 n910 n911 n912 n913 n914 n915 - n916 n917 n918 n919 n920 n921 n922 n923 n924 n925 n926 n927 - n928 n929 n930 n931 n932 n933 n934 n935 n936 n937 n938 n939 - n940 n941 n942 n943 n944 n945 n946 n947 n948 n949 n950 n951 - n952 n953 n954 n955 n956 n957 n958 n959 n960 n961 n962 n963 - n964 n965 n966 n967 n968 n969 n970 n971 n972 n973 n974 n975 - n976 n977 n978 n979 n980 n981 n982 n983 n984 n985 n986 n987 - n988 n989 n990 n991 n992 n993 n994 n995 n996 n997 n998 n999 - n1000 -%token - t1 1 "1" - t2 2 "2" - t3 3 "3" - t4 4 "4" - t5 5 "5" - t6 6 "6" - t7 7 "7" - t8 8 "8" - t9 9 "9" - t10 10 "10" - t11 11 "11" - t12 12 "12" - t13 13 "13" - t14 14 "14" - t15 15 "15" - t16 16 "16" - t17 17 "17" - t18 18 "18" - t19 19 "19" - t20 20 "20" - t21 21 "21" - t22 22 "22" - t23 23 "23" - t24 24 "24" - t25 25 "25" - t26 26 "26" - t27 27 "27" - t28 28 "28" - t29 29 "29" - t30 30 "30" - t31 31 "31" - t32 32 "32" - t33 33 "33" - t34 34 "34" - t35 35 "35" - t36 36 "36" - t37 37 "37" - t38 38 "38" - t39 39 "39" - t40 40 "40" - t41 41 "41" - t42 42 "42" - t43 43 "43" - t44 44 "44" - t45 45 "45" - t46 46 "46" - t47 47 "47" - t48 48 "48" - t49 49 "49" - t50 50 "50" - t51 51 "51" - t52 52 "52" - t53 53 "53" - t54 54 "54" - t55 55 "55" - t56 56 "56" - t57 57 "57" - t58 58 "58" - t59 59 "59" - t60 60 "60" - t61 61 "61" - t62 62 "62" - t63 63 "63" - t64 64 "64" - t65 65 "65" - t66 66 "66" - t67 67 "67" - t68 68 "68" - t69 69 "69" - t70 70 "70" - t71 71 "71" - t72 72 "72" - t73 73 "73" - t74 74 "74" - t75 75 "75" - t76 76 "76" - t77 77 "77" - t78 78 "78" - t79 79 "79" - t80 80 "80" - t81 81 "81" - t82 82 "82" - t83 83 "83" - t84 84 "84" - t85 85 "85" - t86 86 "86" - t87 87 "87" - t88 88 "88" - t89 89 "89" - t90 90 "90" - t91 91 "91" - t92 92 "92" - t93 93 "93" - t94 94 "94" - t95 95 "95" - t96 96 "96" - t97 97 "97" - t98 98 "98" - t99 99 "99" - t100 100 "100" - t101 101 "101" - t102 102 "102" - t103 103 "103" - t104 104 "104" - t105 105 "105" - t106 106 "106" - t107 107 "107" - t108 108 "108" - t109 109 "109" - t110 110 "110" - t111 111 "111" - t112 112 "112" - t113 113 "113" - t114 114 "114" - t115 115 "115" - t116 116 "116" - t117 117 "117" - t118 118 "118" - t119 119 "119" - t120 120 "120" - t121 121 "121" - t122 122 "122" - t123 123 "123" - t124 124 "124" - t125 125 "125" - t126 126 "126" - t127 127 "127" - t128 128 "128" - t129 
129 "129" - t130 130 "130" - t131 131 "131" - t132 132 "132" - t133 133 "133" - t134 134 "134" - t135 135 "135" - t136 136 "136" - t137 137 "137" - t138 138 "138" - t139 139 "139" - t140 140 "140" - t141 141 "141" - t142 142 "142" - t143 143 "143" - t144 144 "144" - t145 145 "145" - t146 146 "146" - t147 147 "147" - t148 148 "148" - t149 149 "149" - t150 150 "150" - t151 151 "151" - t152 152 "152" - t153 153 "153" - t154 154 "154" - t155 155 "155" - t156 156 "156" - t157 157 "157" - t158 158 "158" - t159 159 "159" - t160 160 "160" - t161 161 "161" - t162 162 "162" - t163 163 "163" - t164 164 "164" - t165 165 "165" - t166 166 "166" - t167 167 "167" - t168 168 "168" - t169 169 "169" - t170 170 "170" - t171 171 "171" - t172 172 "172" - t173 173 "173" - t174 174 "174" - t175 175 "175" - t176 176 "176" - t177 177 "177" - t178 178 "178" - t179 179 "179" - t180 180 "180" - t181 181 "181" - t182 182 "182" - t183 183 "183" - t184 184 "184" - t185 185 "185" - t186 186 "186" - t187 187 "187" - t188 188 "188" - t189 189 "189" - t190 190 "190" - t191 191 "191" - t192 192 "192" - t193 193 "193" - t194 194 "194" - t195 195 "195" - t196 196 "196" - t197 197 "197" - t198 198 "198" - t199 199 "199" - t200 200 "200" - t201 201 "201" - t202 202 "202" - t203 203 "203" - t204 204 "204" - t205 205 "205" - t206 206 "206" - t207 207 "207" - t208 208 "208" - t209 209 "209" - t210 210 "210" - t211 211 "211" - t212 212 "212" - t213 213 "213" - t214 214 "214" - t215 215 "215" - t216 216 "216" - t217 217 "217" - t218 218 "218" - t219 219 "219" - t220 220 "220" - t221 221 "221" - t222 222 "222" - t223 223 "223" - t224 224 "224" - t225 225 "225" - t226 226 "226" - t227 227 "227" - t228 228 "228" - t229 229 "229" - t230 230 "230" - t231 231 "231" - t232 232 "232" - t233 233 "233" - t234 234 "234" - t235 235 "235" - t236 236 "236" - t237 237 "237" - t238 238 "238" - t239 239 "239" - t240 240 "240" - t241 241 "241" - t242 242 "242" - t243 243 "243" - t244 244 "244" - t245 245 "245" - t246 246 "246" - t247 247 "247" - t248 248 "248" - t249 249 "249" - t250 250 "250" - t251 251 "251" - t252 252 "252" - t253 253 "253" - t254 254 "254" - t255 255 "255" - t256 256 "256" - t257 257 "257" - t258 258 "258" - t259 259 "259" - t260 260 "260" - t261 261 "261" - t262 262 "262" - t263 263 "263" - t264 264 "264" - t265 265 "265" - t266 266 "266" - t267 267 "267" - t268 268 "268" - t269 269 "269" - t270 270 "270" - t271 271 "271" - t272 272 "272" - t273 273 "273" - t274 274 "274" - t275 275 "275" - t276 276 "276" - t277 277 "277" - t278 278 "278" - t279 279 "279" - t280 280 "280" - t281 281 "281" - t282 282 "282" - t283 283 "283" - t284 284 "284" - t285 285 "285" - t286 286 "286" - t287 287 "287" - t288 288 "288" - t289 289 "289" - t290 290 "290" - t291 291 "291" - t292 292 "292" - t293 293 "293" - t294 294 "294" - t295 295 "295" - t296 296 "296" - t297 297 "297" - t298 298 "298" - t299 299 "299" - t300 300 "300" - t301 301 "301" - t302 302 "302" - t303 303 "303" - t304 304 "304" - t305 305 "305" - t306 306 "306" - t307 307 "307" - t308 308 "308" - t309 309 "309" - t310 310 "310" - t311 311 "311" - t312 312 "312" - t313 313 "313" - t314 314 "314" - t315 315 "315" - t316 316 "316" - t317 317 "317" - t318 318 "318" - t319 319 "319" - t320 320 "320" - t321 321 "321" - t322 322 "322" - t323 323 "323" - t324 324 "324" - t325 325 "325" - t326 326 "326" - t327 327 "327" - t328 328 "328" - t329 329 "329" - t330 330 "330" - t331 331 "331" - t332 332 "332" - t333 333 "333" - t334 334 "334" - t335 335 "335" - t336 336 "336" - t337 337 "337" - t338 
338 "338" - t339 339 "339" - t340 340 "340" - t341 341 "341" - t342 342 "342" - t343 343 "343" - t344 344 "344" - t345 345 "345" - t346 346 "346" - t347 347 "347" - t348 348 "348" - t349 349 "349" - t350 350 "350" - t351 351 "351" - t352 352 "352" - t353 353 "353" - t354 354 "354" - t355 355 "355" - t356 356 "356" - t357 357 "357" - t358 358 "358" - t359 359 "359" - t360 360 "360" - t361 361 "361" - t362 362 "362" - t363 363 "363" - t364 364 "364" - t365 365 "365" - t366 366 "366" - t367 367 "367" - t368 368 "368" - t369 369 "369" - t370 370 "370" - t371 371 "371" - t372 372 "372" - t373 373 "373" - t374 374 "374" - t375 375 "375" - t376 376 "376" - t377 377 "377" - t378 378 "378" - t379 379 "379" - t380 380 "380" - t381 381 "381" - t382 382 "382" - t383 383 "383" - t384 384 "384" - t385 385 "385" - t386 386 "386" - t387 387 "387" - t388 388 "388" - t389 389 "389" - t390 390 "390" - t391 391 "391" - t392 392 "392" - t393 393 "393" - t394 394 "394" - t395 395 "395" - t396 396 "396" - t397 397 "397" - t398 398 "398" - t399 399 "399" - t400 400 "400" - t401 401 "401" - t402 402 "402" - t403 403 "403" - t404 404 "404" - t405 405 "405" - t406 406 "406" - t407 407 "407" - t408 408 "408" - t409 409 "409" - t410 410 "410" - t411 411 "411" - t412 412 "412" - t413 413 "413" - t414 414 "414" - t415 415 "415" - t416 416 "416" - t417 417 "417" - t418 418 "418" - t419 419 "419" - t420 420 "420" - t421 421 "421" - t422 422 "422" - t423 423 "423" - t424 424 "424" - t425 425 "425" - t426 426 "426" - t427 427 "427" - t428 428 "428" - t429 429 "429" - t430 430 "430" - t431 431 "431" - t432 432 "432" - t433 433 "433" - t434 434 "434" - t435 435 "435" - t436 436 "436" - t437 437 "437" - t438 438 "438" - t439 439 "439" - t440 440 "440" - t441 441 "441" - t442 442 "442" - t443 443 "443" - t444 444 "444" - t445 445 "445" - t446 446 "446" - t447 447 "447" - t448 448 "448" - t449 449 "449" - t450 450 "450" - t451 451 "451" - t452 452 "452" - t453 453 "453" - t454 454 "454" - t455 455 "455" - t456 456 "456" - t457 457 "457" - t458 458 "458" - t459 459 "459" - t460 460 "460" - t461 461 "461" - t462 462 "462" - t463 463 "463" - t464 464 "464" - t465 465 "465" - t466 466 "466" - t467 467 "467" - t468 468 "468" - t469 469 "469" - t470 470 "470" - t471 471 "471" - t472 472 "472" - t473 473 "473" - t474 474 "474" - t475 475 "475" - t476 476 "476" - t477 477 "477" - t478 478 "478" - t479 479 "479" - t480 480 "480" - t481 481 "481" - t482 482 "482" - t483 483 "483" - t484 484 "484" - t485 485 "485" - t486 486 "486" - t487 487 "487" - t488 488 "488" - t489 489 "489" - t490 490 "490" - t491 491 "491" - t492 492 "492" - t493 493 "493" - t494 494 "494" - t495 495 "495" - t496 496 "496" - t497 497 "497" - t498 498 "498" - t499 499 "499" - t500 500 "500" - t501 501 "501" - t502 502 "502" - t503 503 "503" - t504 504 "504" - t505 505 "505" - t506 506 "506" - t507 507 "507" - t508 508 "508" - t509 509 "509" - t510 510 "510" - t511 511 "511" - t512 512 "512" - t513 513 "513" - t514 514 "514" - t515 515 "515" - t516 516 "516" - t517 517 "517" - t518 518 "518" - t519 519 "519" - t520 520 "520" - t521 521 "521" - t522 522 "522" - t523 523 "523" - t524 524 "524" - t525 525 "525" - t526 526 "526" - t527 527 "527" - t528 528 "528" - t529 529 "529" - t530 530 "530" - t531 531 "531" - t532 532 "532" - t533 533 "533" - t534 534 "534" - t535 535 "535" - t536 536 "536" - t537 537 "537" - t538 538 "538" - t539 539 "539" - t540 540 "540" - t541 541 "541" - t542 542 "542" - t543 543 "543" - t544 544 "544" - t545 545 "545" - t546 546 "546" - t547 
547 "547" - t548 548 "548" - t549 549 "549" - t550 550 "550" - t551 551 "551" - t552 552 "552" - t553 553 "553" - t554 554 "554" - t555 555 "555" - t556 556 "556" - t557 557 "557" - t558 558 "558" - t559 559 "559" - t560 560 "560" - t561 561 "561" - t562 562 "562" - t563 563 "563" - t564 564 "564" - t565 565 "565" - t566 566 "566" - t567 567 "567" - t568 568 "568" - t569 569 "569" - t570 570 "570" - t571 571 "571" - t572 572 "572" - t573 573 "573" - t574 574 "574" - t575 575 "575" - t576 576 "576" - t577 577 "577" - t578 578 "578" - t579 579 "579" - t580 580 "580" - t581 581 "581" - t582 582 "582" - t583 583 "583" - t584 584 "584" - t585 585 "585" - t586 586 "586" - t587 587 "587" - t588 588 "588" - t589 589 "589" - t590 590 "590" - t591 591 "591" - t592 592 "592" - t593 593 "593" - t594 594 "594" - t595 595 "595" - t596 596 "596" - t597 597 "597" - t598 598 "598" - t599 599 "599" - t600 600 "600" - t601 601 "601" - t602 602 "602" - t603 603 "603" - t604 604 "604" - t605 605 "605" - t606 606 "606" - t607 607 "607" - t608 608 "608" - t609 609 "609" - t610 610 "610" - t611 611 "611" - t612 612 "612" - t613 613 "613" - t614 614 "614" - t615 615 "615" - t616 616 "616" - t617 617 "617" - t618 618 "618" - t619 619 "619" - t620 620 "620" - t621 621 "621" - t622 622 "622" - t623 623 "623" - t624 624 "624" - t625 625 "625" - t626 626 "626" - t627 627 "627" - t628 628 "628" - t629 629 "629" - t630 630 "630" - t631 631 "631" - t632 632 "632" - t633 633 "633" - t634 634 "634" - t635 635 "635" - t636 636 "636" - t637 637 "637" - t638 638 "638" - t639 639 "639" - t640 640 "640" - t641 641 "641" - t642 642 "642" - t643 643 "643" - t644 644 "644" - t645 645 "645" - t646 646 "646" - t647 647 "647" - t648 648 "648" - t649 649 "649" - t650 650 "650" - t651 651 "651" - t652 652 "652" - t653 653 "653" - t654 654 "654" - t655 655 "655" - t656 656 "656" - t657 657 "657" - t658 658 "658" - t659 659 "659" - t660 660 "660" - t661 661 "661" - t662 662 "662" - t663 663 "663" - t664 664 "664" - t665 665 "665" - t666 666 "666" - t667 667 "667" - t668 668 "668" - t669 669 "669" - t670 670 "670" - t671 671 "671" - t672 672 "672" - t673 673 "673" - t674 674 "674" - t675 675 "675" - t676 676 "676" - t677 677 "677" - t678 678 "678" - t679 679 "679" - t680 680 "680" - t681 681 "681" - t682 682 "682" - t683 683 "683" - t684 684 "684" - t685 685 "685" - t686 686 "686" - t687 687 "687" - t688 688 "688" - t689 689 "689" - t690 690 "690" - t691 691 "691" - t692 692 "692" - t693 693 "693" - t694 694 "694" - t695 695 "695" - t696 696 "696" - t697 697 "697" - t698 698 "698" - t699 699 "699" - t700 700 "700" - t701 701 "701" - t702 702 "702" - t703 703 "703" - t704 704 "704" - t705 705 "705" - t706 706 "706" - t707 707 "707" - t708 708 "708" - t709 709 "709" - t710 710 "710" - t711 711 "711" - t712 712 "712" - t713 713 "713" - t714 714 "714" - t715 715 "715" - t716 716 "716" - t717 717 "717" - t718 718 "718" - t719 719 "719" - t720 720 "720" - t721 721 "721" - t722 722 "722" - t723 723 "723" - t724 724 "724" - t725 725 "725" - t726 726 "726" - t727 727 "727" - t728 728 "728" - t729 729 "729" - t730 730 "730" - t731 731 "731" - t732 732 "732" - t733 733 "733" - t734 734 "734" - t735 735 "735" - t736 736 "736" - t737 737 "737" - t738 738 "738" - t739 739 "739" - t740 740 "740" - t741 741 "741" - t742 742 "742" - t743 743 "743" - t744 744 "744" - t745 745 "745" - t746 746 "746" - t747 747 "747" - t748 748 "748" - t749 749 "749" - t750 750 "750" - t751 751 "751" - t752 752 "752" - t753 753 "753" - t754 754 "754" - t755 755 "755" - t756 
756 "756" - t757 757 "757" - t758 758 "758" - t759 759 "759" - t760 760 "760" - t761 761 "761" - t762 762 "762" - t763 763 "763" - t764 764 "764" - t765 765 "765" - t766 766 "766" - t767 767 "767" - t768 768 "768" - t769 769 "769" - t770 770 "770" - t771 771 "771" - t772 772 "772" - t773 773 "773" - t774 774 "774" - t775 775 "775" - t776 776 "776" - t777 777 "777" - t778 778 "778" - t779 779 "779" - t780 780 "780" - t781 781 "781" - t782 782 "782" - t783 783 "783" - t784 784 "784" - t785 785 "785" - t786 786 "786" - t787 787 "787" - t788 788 "788" - t789 789 "789" - t790 790 "790" - t791 791 "791" - t792 792 "792" - t793 793 "793" - t794 794 "794" - t795 795 "795" - t796 796 "796" - t797 797 "797" - t798 798 "798" - t799 799 "799" - t800 800 "800" - t801 801 "801" - t802 802 "802" - t803 803 "803" - t804 804 "804" - t805 805 "805" - t806 806 "806" - t807 807 "807" - t808 808 "808" - t809 809 "809" - t810 810 "810" - t811 811 "811" - t812 812 "812" - t813 813 "813" - t814 814 "814" - t815 815 "815" - t816 816 "816" - t817 817 "817" - t818 818 "818" - t819 819 "819" - t820 820 "820" - t821 821 "821" - t822 822 "822" - t823 823 "823" - t824 824 "824" - t825 825 "825" - t826 826 "826" - t827 827 "827" - t828 828 "828" - t829 829 "829" - t830 830 "830" - t831 831 "831" - t832 832 "832" - t833 833 "833" - t834 834 "834" - t835 835 "835" - t836 836 "836" - t837 837 "837" - t838 838 "838" - t839 839 "839" - t840 840 "840" - t841 841 "841" - t842 842 "842" - t843 843 "843" - t844 844 "844" - t845 845 "845" - t846 846 "846" - t847 847 "847" - t848 848 "848" - t849 849 "849" - t850 850 "850" - t851 851 "851" - t852 852 "852" - t853 853 "853" - t854 854 "854" - t855 855 "855" - t856 856 "856" - t857 857 "857" - t858 858 "858" - t859 859 "859" - t860 860 "860" - t861 861 "861" - t862 862 "862" - t863 863 "863" - t864 864 "864" - t865 865 "865" - t866 866 "866" - t867 867 "867" - t868 868 "868" - t869 869 "869" - t870 870 "870" - t871 871 "871" - t872 872 "872" - t873 873 "873" - t874 874 "874" - t875 875 "875" - t876 876 "876" - t877 877 "877" - t878 878 "878" - t879 879 "879" - t880 880 "880" - t881 881 "881" - t882 882 "882" - t883 883 "883" - t884 884 "884" - t885 885 "885" - t886 886 "886" - t887 887 "887" - t888 888 "888" - t889 889 "889" - t890 890 "890" - t891 891 "891" - t892 892 "892" - t893 893 "893" - t894 894 "894" - t895 895 "895" - t896 896 "896" - t897 897 "897" - t898 898 "898" - t899 899 "899" - t900 900 "900" - t901 901 "901" - t902 902 "902" - t903 903 "903" - t904 904 "904" - t905 905 "905" - t906 906 "906" - t907 907 "907" - t908 908 "908" - t909 909 "909" - t910 910 "910" - t911 911 "911" - t912 912 "912" - t913 913 "913" - t914 914 "914" - t915 915 "915" - t916 916 "916" - t917 917 "917" - t918 918 "918" - t919 919 "919" - t920 920 "920" - t921 921 "921" - t922 922 "922" - t923 923 "923" - t924 924 "924" - t925 925 "925" - t926 926 "926" - t927 927 "927" - t928 928 "928" - t929 929 "929" - t930 930 "930" - t931 931 "931" - t932 932 "932" - t933 933 "933" - t934 934 "934" - t935 935 "935" - t936 936 "936" - t937 937 "937" - t938 938 "938" - t939 939 "939" - t940 940 "940" - t941 941 "941" - t942 942 "942" - t943 943 "943" - t944 944 "944" - t945 945 "945" - t946 946 "946" - t947 947 "947" - t948 948 "948" - t949 949 "949" - t950 950 "950" - t951 951 "951" - t952 952 "952" - t953 953 "953" - t954 954 "954" - t955 955 "955" - t956 956 "956" - t957 957 "957" - t958 958 "958" - t959 959 "959" - t960 960 "960" - t961 961 "961" - t962 962 "962" - t963 963 "963" - t964 964 "964" - t965 
965 "965" - t966 966 "966" - t967 967 "967" - t968 968 "968" - t969 969 "969" - t970 970 "970" - t971 971 "971" - t972 972 "972" - t973 973 "973" - t974 974 "974" - t975 975 "975" - t976 976 "976" - t977 977 "977" - t978 978 "978" - t979 979 "979" - t980 980 "980" - t981 981 "981" - t982 982 "982" - t983 983 "983" - t984 984 "984" - t985 985 "985" - t986 986 "986" - t987 987 "987" - t988 988 "988" - t989 989 "989" - t990 990 "990" - t991 991 "991" - t992 992 "992" - t993 993 "993" - t994 994 "994" - t995 995 "995" - t996 996 "996" - t997 997 "997" - t998 998 "998" - t999 999 "999" - t1000 1000 "1000" -%% -input: - exp { assert ($1 == 1); $$ = $1; } -| input exp { assert ($2 == $1 + 1); $$ = $2; } -; - -exp: - n1 "1" { assert ($1 == 1); $$ = $1; } -| n2 "2" { assert ($1 == 2); $$ = $1; } -| n3 "3" { assert ($1 == 3); $$ = $1; } -| n4 "4" { assert ($1 == 4); $$ = $1; } -| n5 "5" { assert ($1 == 5); $$ = $1; } -| n6 "6" { assert ($1 == 6); $$ = $1; } -| n7 "7" { assert ($1 == 7); $$ = $1; } -| n8 "8" { assert ($1 == 8); $$ = $1; } -| n9 "9" { assert ($1 == 9); $$ = $1; } -| n10 "10" { assert ($1 == 10); $$ = $1; } -| n11 "11" { assert ($1 == 11); $$ = $1; } -| n12 "12" { assert ($1 == 12); $$ = $1; } -| n13 "13" { assert ($1 == 13); $$ = $1; } -| n14 "14" { assert ($1 == 14); $$ = $1; } -| n15 "15" { assert ($1 == 15); $$ = $1; } -| n16 "16" { assert ($1 == 16); $$ = $1; } -| n17 "17" { assert ($1 == 17); $$ = $1; } -| n18 "18" { assert ($1 == 18); $$ = $1; } -| n19 "19" { assert ($1 == 19); $$ = $1; } -| n20 "20" { assert ($1 == 20); $$ = $1; } -| n21 "21" { assert ($1 == 21); $$ = $1; } -| n22 "22" { assert ($1 == 22); $$ = $1; } -| n23 "23" { assert ($1 == 23); $$ = $1; } -| n24 "24" { assert ($1 == 24); $$ = $1; } -| n25 "25" { assert ($1 == 25); $$ = $1; } -| n26 "26" { assert ($1 == 26); $$ = $1; } -| n27 "27" { assert ($1 == 27); $$ = $1; } -| n28 "28" { assert ($1 == 28); $$ = $1; } -| n29 "29" { assert ($1 == 29); $$ = $1; } -| n30 "30" { assert ($1 == 30); $$ = $1; } -| n31 "31" { assert ($1 == 31); $$ = $1; } -| n32 "32" { assert ($1 == 32); $$ = $1; } -| n33 "33" { assert ($1 == 33); $$ = $1; } -| n34 "34" { assert ($1 == 34); $$ = $1; } -| n35 "35" { assert ($1 == 35); $$ = $1; } -| n36 "36" { assert ($1 == 36); $$ = $1; } -| n37 "37" { assert ($1 == 37); $$ = $1; } -| n38 "38" { assert ($1 == 38); $$ = $1; } -| n39 "39" { assert ($1 == 39); $$ = $1; } -| n40 "40" { assert ($1 == 40); $$ = $1; } -| n41 "41" { assert ($1 == 41); $$ = $1; } -| n42 "42" { assert ($1 == 42); $$ = $1; } -| n43 "43" { assert ($1 == 43); $$ = $1; } -| n44 "44" { assert ($1 == 44); $$ = $1; } -| n45 "45" { assert ($1 == 45); $$ = $1; } -| n46 "46" { assert ($1 == 46); $$ = $1; } -| n47 "47" { assert ($1 == 47); $$ = $1; } -| n48 "48" { assert ($1 == 48); $$ = $1; } -| n49 "49" { assert ($1 == 49); $$ = $1; } -| n50 "50" { assert ($1 == 50); $$ = $1; } -| n51 "51" { assert ($1 == 51); $$ = $1; } -| n52 "52" { assert ($1 == 52); $$ = $1; } -| n53 "53" { assert ($1 == 53); $$ = $1; } -| n54 "54" { assert ($1 == 54); $$ = $1; } -| n55 "55" { assert ($1 == 55); $$ = $1; } -| n56 "56" { assert ($1 == 56); $$ = $1; } -| n57 "57" { assert ($1 == 57); $$ = $1; } -| n58 "58" { assert ($1 == 58); $$ = $1; } -| n59 "59" { assert ($1 == 59); $$ = $1; } -| n60 "60" { assert ($1 == 60); $$ = $1; } -| n61 "61" { assert ($1 == 61); $$ = $1; } -| n62 "62" { assert ($1 == 62); $$ = $1; } -| n63 "63" { assert ($1 == 63); $$ = $1; } -| n64 "64" { assert ($1 == 64); $$ = $1; } -| n65 "65" { assert ($1 == 65); $$ = $1; } -| 
n66 "66" { assert ($1 == 66); $$ = $1; } -| n67 "67" { assert ($1 == 67); $$ = $1; } -| n68 "68" { assert ($1 == 68); $$ = $1; } -| n69 "69" { assert ($1 == 69); $$ = $1; } -| n70 "70" { assert ($1 == 70); $$ = $1; } -| n71 "71" { assert ($1 == 71); $$ = $1; } -| n72 "72" { assert ($1 == 72); $$ = $1; } -| n73 "73" { assert ($1 == 73); $$ = $1; } -| n74 "74" { assert ($1 == 74); $$ = $1; } -| n75 "75" { assert ($1 == 75); $$ = $1; } -| n76 "76" { assert ($1 == 76); $$ = $1; } -| n77 "77" { assert ($1 == 77); $$ = $1; } -| n78 "78" { assert ($1 == 78); $$ = $1; } -| n79 "79" { assert ($1 == 79); $$ = $1; } -| n80 "80" { assert ($1 == 80); $$ = $1; } -| n81 "81" { assert ($1 == 81); $$ = $1; } -| n82 "82" { assert ($1 == 82); $$ = $1; } -| n83 "83" { assert ($1 == 83); $$ = $1; } -| n84 "84" { assert ($1 == 84); $$ = $1; } -| n85 "85" { assert ($1 == 85); $$ = $1; } -| n86 "86" { assert ($1 == 86); $$ = $1; } -| n87 "87" { assert ($1 == 87); $$ = $1; } -| n88 "88" { assert ($1 == 88); $$ = $1; } -| n89 "89" { assert ($1 == 89); $$ = $1; } -| n90 "90" { assert ($1 == 90); $$ = $1; } -| n91 "91" { assert ($1 == 91); $$ = $1; } -| n92 "92" { assert ($1 == 92); $$ = $1; } -| n93 "93" { assert ($1 == 93); $$ = $1; } -| n94 "94" { assert ($1 == 94); $$ = $1; } -| n95 "95" { assert ($1 == 95); $$ = $1; } -| n96 "96" { assert ($1 == 96); $$ = $1; } -| n97 "97" { assert ($1 == 97); $$ = $1; } -| n98 "98" { assert ($1 == 98); $$ = $1; } -| n99 "99" { assert ($1 == 99); $$ = $1; } -| n100 "100" { assert ($1 == 100); $$ = $1; } -| n101 "101" { assert ($1 == 101); $$ = $1; } -| n102 "102" { assert ($1 == 102); $$ = $1; } -| n103 "103" { assert ($1 == 103); $$ = $1; } -| n104 "104" { assert ($1 == 104); $$ = $1; } -| n105 "105" { assert ($1 == 105); $$ = $1; } -| n106 "106" { assert ($1 == 106); $$ = $1; } -| n107 "107" { assert ($1 == 107); $$ = $1; } -| n108 "108" { assert ($1 == 108); $$ = $1; } -| n109 "109" { assert ($1 == 109); $$ = $1; } -| n110 "110" { assert ($1 == 110); $$ = $1; } -| n111 "111" { assert ($1 == 111); $$ = $1; } -| n112 "112" { assert ($1 == 112); $$ = $1; } -| n113 "113" { assert ($1 == 113); $$ = $1; } -| n114 "114" { assert ($1 == 114); $$ = $1; } -| n115 "115" { assert ($1 == 115); $$ = $1; } -| n116 "116" { assert ($1 == 116); $$ = $1; } -| n117 "117" { assert ($1 == 117); $$ = $1; } -| n118 "118" { assert ($1 == 118); $$ = $1; } -| n119 "119" { assert ($1 == 119); $$ = $1; } -| n120 "120" { assert ($1 == 120); $$ = $1; } -| n121 "121" { assert ($1 == 121); $$ = $1; } -| n122 "122" { assert ($1 == 122); $$ = $1; } -| n123 "123" { assert ($1 == 123); $$ = $1; } -| n124 "124" { assert ($1 == 124); $$ = $1; } -| n125 "125" { assert ($1 == 125); $$ = $1; } -| n126 "126" { assert ($1 == 126); $$ = $1; } -| n127 "127" { assert ($1 == 127); $$ = $1; } -| n128 "128" { assert ($1 == 128); $$ = $1; } -| n129 "129" { assert ($1 == 129); $$ = $1; } -| n130 "130" { assert ($1 == 130); $$ = $1; } -| n131 "131" { assert ($1 == 131); $$ = $1; } -| n132 "132" { assert ($1 == 132); $$ = $1; } -| n133 "133" { assert ($1 == 133); $$ = $1; } -| n134 "134" { assert ($1 == 134); $$ = $1; } -| n135 "135" { assert ($1 == 135); $$ = $1; } -| n136 "136" { assert ($1 == 136); $$ = $1; } -| n137 "137" { assert ($1 == 137); $$ = $1; } -| n138 "138" { assert ($1 == 138); $$ = $1; } -| n139 "139" { assert ($1 == 139); $$ = $1; } -| n140 "140" { assert ($1 == 140); $$ = $1; } -| n141 "141" { assert ($1 == 141); $$ = $1; } -| n142 "142" { assert ($1 == 142); $$ = $1; } -| n143 "143" { assert ($1 == 143); $$ = 
$1; } -| n144 "144" { assert ($1 == 144); $$ = $1; } -| n145 "145" { assert ($1 == 145); $$ = $1; } -| n146 "146" { assert ($1 == 146); $$ = $1; } -| n147 "147" { assert ($1 == 147); $$ = $1; } -| n148 "148" { assert ($1 == 148); $$ = $1; } -| n149 "149" { assert ($1 == 149); $$ = $1; } -| n150 "150" { assert ($1 == 150); $$ = $1; } -| n151 "151" { assert ($1 == 151); $$ = $1; } -| n152 "152" { assert ($1 == 152); $$ = $1; } -| n153 "153" { assert ($1 == 153); $$ = $1; } -| n154 "154" { assert ($1 == 154); $$ = $1; } -| n155 "155" { assert ($1 == 155); $$ = $1; } -| n156 "156" { assert ($1 == 156); $$ = $1; } -| n157 "157" { assert ($1 == 157); $$ = $1; } -| n158 "158" { assert ($1 == 158); $$ = $1; } -| n159 "159" { assert ($1 == 159); $$ = $1; } -| n160 "160" { assert ($1 == 160); $$ = $1; } -| n161 "161" { assert ($1 == 161); $$ = $1; } -| n162 "162" { assert ($1 == 162); $$ = $1; } -| n163 "163" { assert ($1 == 163); $$ = $1; } -| n164 "164" { assert ($1 == 164); $$ = $1; } -| n165 "165" { assert ($1 == 165); $$ = $1; } -| n166 "166" { assert ($1 == 166); $$ = $1; } -| n167 "167" { assert ($1 == 167); $$ = $1; } -| n168 "168" { assert ($1 == 168); $$ = $1; } -| n169 "169" { assert ($1 == 169); $$ = $1; } -| n170 "170" { assert ($1 == 170); $$ = $1; } -| n171 "171" { assert ($1 == 171); $$ = $1; } -| n172 "172" { assert ($1 == 172); $$ = $1; } -| n173 "173" { assert ($1 == 173); $$ = $1; } -| n174 "174" { assert ($1 == 174); $$ = $1; } -| n175 "175" { assert ($1 == 175); $$ = $1; } -| n176 "176" { assert ($1 == 176); $$ = $1; } -| n177 "177" { assert ($1 == 177); $$ = $1; } -| n178 "178" { assert ($1 == 178); $$ = $1; } -| n179 "179" { assert ($1 == 179); $$ = $1; } -| n180 "180" { assert ($1 == 180); $$ = $1; } -| n181 "181" { assert ($1 == 181); $$ = $1; } -| n182 "182" { assert ($1 == 182); $$ = $1; } -| n183 "183" { assert ($1 == 183); $$ = $1; } -| n184 "184" { assert ($1 == 184); $$ = $1; } -| n185 "185" { assert ($1 == 185); $$ = $1; } -| n186 "186" { assert ($1 == 186); $$ = $1; } -| n187 "187" { assert ($1 == 187); $$ = $1; } -| n188 "188" { assert ($1 == 188); $$ = $1; } -| n189 "189" { assert ($1 == 189); $$ = $1; } -| n190 "190" { assert ($1 == 190); $$ = $1; } -| n191 "191" { assert ($1 == 191); $$ = $1; } -| n192 "192" { assert ($1 == 192); $$ = $1; } -| n193 "193" { assert ($1 == 193); $$ = $1; } -| n194 "194" { assert ($1 == 194); $$ = $1; } -| n195 "195" { assert ($1 == 195); $$ = $1; } -| n196 "196" { assert ($1 == 196); $$ = $1; } -| n197 "197" { assert ($1 == 197); $$ = $1; } -| n198 "198" { assert ($1 == 198); $$ = $1; } -| n199 "199" { assert ($1 == 199); $$ = $1; } -| n200 "200" { assert ($1 == 200); $$ = $1; } -| n201 "201" { assert ($1 == 201); $$ = $1; } -| n202 "202" { assert ($1 == 202); $$ = $1; } -| n203 "203" { assert ($1 == 203); $$ = $1; } -| n204 "204" { assert ($1 == 204); $$ = $1; } -| n205 "205" { assert ($1 == 205); $$ = $1; } -| n206 "206" { assert ($1 == 206); $$ = $1; } -| n207 "207" { assert ($1 == 207); $$ = $1; } -| n208 "208" { assert ($1 == 208); $$ = $1; } -| n209 "209" { assert ($1 == 209); $$ = $1; } -| n210 "210" { assert ($1 == 210); $$ = $1; } -| n211 "211" { assert ($1 == 211); $$ = $1; } -| n212 "212" { assert ($1 == 212); $$ = $1; } -| n213 "213" { assert ($1 == 213); $$ = $1; } -| n214 "214" { assert ($1 == 214); $$ = $1; } -| n215 "215" { assert ($1 == 215); $$ = $1; } -| n216 "216" { assert ($1 == 216); $$ = $1; } -| n217 "217" { assert ($1 == 217); $$ = $1; } -| n218 "218" { assert ($1 == 218); $$ = $1; } -| n219 "219" { assert 
($1 == 219); $$ = $1; } -| n220 "220" { assert ($1 == 220); $$ = $1; } -| n221 "221" { assert ($1 == 221); $$ = $1; } -| n222 "222" { assert ($1 == 222); $$ = $1; } -| n223 "223" { assert ($1 == 223); $$ = $1; } -| n224 "224" { assert ($1 == 224); $$ = $1; } -| n225 "225" { assert ($1 == 225); $$ = $1; } -| n226 "226" { assert ($1 == 226); $$ = $1; } -| n227 "227" { assert ($1 == 227); $$ = $1; } -| n228 "228" { assert ($1 == 228); $$ = $1; } -| n229 "229" { assert ($1 == 229); $$ = $1; } -| n230 "230" { assert ($1 == 230); $$ = $1; } -| n231 "231" { assert ($1 == 231); $$ = $1; } -| n232 "232" { assert ($1 == 232); $$ = $1; } -| n233 "233" { assert ($1 == 233); $$ = $1; } -| n234 "234" { assert ($1 == 234); $$ = $1; } -| n235 "235" { assert ($1 == 235); $$ = $1; } -| n236 "236" { assert ($1 == 236); $$ = $1; } -| n237 "237" { assert ($1 == 237); $$ = $1; } -| n238 "238" { assert ($1 == 238); $$ = $1; } -| n239 "239" { assert ($1 == 239); $$ = $1; } -| n240 "240" { assert ($1 == 240); $$ = $1; } -| n241 "241" { assert ($1 == 241); $$ = $1; } -| n242 "242" { assert ($1 == 242); $$ = $1; } -| n243 "243" { assert ($1 == 243); $$ = $1; } -| n244 "244" { assert ($1 == 244); $$ = $1; } -| n245 "245" { assert ($1 == 245); $$ = $1; } -| n246 "246" { assert ($1 == 246); $$ = $1; } -| n247 "247" { assert ($1 == 247); $$ = $1; } -| n248 "248" { assert ($1 == 248); $$ = $1; } -| n249 "249" { assert ($1 == 249); $$ = $1; } -| n250 "250" { assert ($1 == 250); $$ = $1; } -| n251 "251" { assert ($1 == 251); $$ = $1; } -| n252 "252" { assert ($1 == 252); $$ = $1; } -| n253 "253" { assert ($1 == 253); $$ = $1; } -| n254 "254" { assert ($1 == 254); $$ = $1; } -| n255 "255" { assert ($1 == 255); $$ = $1; } -| n256 "256" { assert ($1 == 256); $$ = $1; } -| n257 "257" { assert ($1 == 257); $$ = $1; } -| n258 "258" { assert ($1 == 258); $$ = $1; } -| n259 "259" { assert ($1 == 259); $$ = $1; } -| n260 "260" { assert ($1 == 260); $$ = $1; } -| n261 "261" { assert ($1 == 261); $$ = $1; } -| n262 "262" { assert ($1 == 262); $$ = $1; } -| n263 "263" { assert ($1 == 263); $$ = $1; } -| n264 "264" { assert ($1 == 264); $$ = $1; } -| n265 "265" { assert ($1 == 265); $$ = $1; } -| n266 "266" { assert ($1 == 266); $$ = $1; } -| n267 "267" { assert ($1 == 267); $$ = $1; } -| n268 "268" { assert ($1 == 268); $$ = $1; } -| n269 "269" { assert ($1 == 269); $$ = $1; } -| n270 "270" { assert ($1 == 270); $$ = $1; } -| n271 "271" { assert ($1 == 271); $$ = $1; } -| n272 "272" { assert ($1 == 272); $$ = $1; } -| n273 "273" { assert ($1 == 273); $$ = $1; } -| n274 "274" { assert ($1 == 274); $$ = $1; } -| n275 "275" { assert ($1 == 275); $$ = $1; } -| n276 "276" { assert ($1 == 276); $$ = $1; } -| n277 "277" { assert ($1 == 277); $$ = $1; } -| n278 "278" { assert ($1 == 278); $$ = $1; } -| n279 "279" { assert ($1 == 279); $$ = $1; } -| n280 "280" { assert ($1 == 280); $$ = $1; } -| n281 "281" { assert ($1 == 281); $$ = $1; } -| n282 "282" { assert ($1 == 282); $$ = $1; } -| n283 "283" { assert ($1 == 283); $$ = $1; } -| n284 "284" { assert ($1 == 284); $$ = $1; } -| n285 "285" { assert ($1 == 285); $$ = $1; } -| n286 "286" { assert ($1 == 286); $$ = $1; } -| n287 "287" { assert ($1 == 287); $$ = $1; } -| n288 "288" { assert ($1 == 288); $$ = $1; } -| n289 "289" { assert ($1 == 289); $$ = $1; } -| n290 "290" { assert ($1 == 290); $$ = $1; } -| n291 "291" { assert ($1 == 291); $$ = $1; } -| n292 "292" { assert ($1 == 292); $$ = $1; } -| n293 "293" { assert ($1 == 293); $$ = $1; } -| n294 "294" { assert ($1 == 294); $$ = $1; } -| 
n295 "295" { assert ($1 == 295); $$ = $1; } -| n296 "296" { assert ($1 == 296); $$ = $1; } -| n297 "297" { assert ($1 == 297); $$ = $1; } -| n298 "298" { assert ($1 == 298); $$ = $1; } -| n299 "299" { assert ($1 == 299); $$ = $1; } -| n300 "300" { assert ($1 == 300); $$ = $1; } -| n301 "301" { assert ($1 == 301); $$ = $1; } -| n302 "302" { assert ($1 == 302); $$ = $1; } -| n303 "303" { assert ($1 == 303); $$ = $1; } -| n304 "304" { assert ($1 == 304); $$ = $1; } -| n305 "305" { assert ($1 == 305); $$ = $1; } -| n306 "306" { assert ($1 == 306); $$ = $1; } -| n307 "307" { assert ($1 == 307); $$ = $1; } -| n308 "308" { assert ($1 == 308); $$ = $1; } -| n309 "309" { assert ($1 == 309); $$ = $1; } -| n310 "310" { assert ($1 == 310); $$ = $1; } -| n311 "311" { assert ($1 == 311); $$ = $1; } -| n312 "312" { assert ($1 == 312); $$ = $1; } -| n313 "313" { assert ($1 == 313); $$ = $1; } -| n314 "314" { assert ($1 == 314); $$ = $1; } -| n315 "315" { assert ($1 == 315); $$ = $1; } -| n316 "316" { assert ($1 == 316); $$ = $1; } -| n317 "317" { assert ($1 == 317); $$ = $1; } -| n318 "318" { assert ($1 == 318); $$ = $1; } -| n319 "319" { assert ($1 == 319); $$ = $1; } -| n320 "320" { assert ($1 == 320); $$ = $1; } -| n321 "321" { assert ($1 == 321); $$ = $1; } -| n322 "322" { assert ($1 == 322); $$ = $1; } -| n323 "323" { assert ($1 == 323); $$ = $1; } -| n324 "324" { assert ($1 == 324); $$ = $1; } -| n325 "325" { assert ($1 == 325); $$ = $1; } -| n326 "326" { assert ($1 == 326); $$ = $1; } -| n327 "327" { assert ($1 == 327); $$ = $1; } -| n328 "328" { assert ($1 == 328); $$ = $1; } -| n329 "329" { assert ($1 == 329); $$ = $1; } -| n330 "330" { assert ($1 == 330); $$ = $1; } -| n331 "331" { assert ($1 == 331); $$ = $1; } -| n332 "332" { assert ($1 == 332); $$ = $1; } -| n333 "333" { assert ($1 == 333); $$ = $1; } -| n334 "334" { assert ($1 == 334); $$ = $1; } -| n335 "335" { assert ($1 == 335); $$ = $1; } -| n336 "336" { assert ($1 == 336); $$ = $1; } -| n337 "337" { assert ($1 == 337); $$ = $1; } -| n338 "338" { assert ($1 == 338); $$ = $1; } -| n339 "339" { assert ($1 == 339); $$ = $1; } -| n340 "340" { assert ($1 == 340); $$ = $1; } -| n341 "341" { assert ($1 == 341); $$ = $1; } -| n342 "342" { assert ($1 == 342); $$ = $1; } -| n343 "343" { assert ($1 == 343); $$ = $1; } -| n344 "344" { assert ($1 == 344); $$ = $1; } -| n345 "345" { assert ($1 == 345); $$ = $1; } -| n346 "346" { assert ($1 == 346); $$ = $1; } -| n347 "347" { assert ($1 == 347); $$ = $1; } -| n348 "348" { assert ($1 == 348); $$ = $1; } -| n349 "349" { assert ($1 == 349); $$ = $1; } -| n350 "350" { assert ($1 == 350); $$ = $1; } -| n351 "351" { assert ($1 == 351); $$ = $1; } -| n352 "352" { assert ($1 == 352); $$ = $1; } -| n353 "353" { assert ($1 == 353); $$ = $1; } -| n354 "354" { assert ($1 == 354); $$ = $1; } -| n355 "355" { assert ($1 == 355); $$ = $1; } -| n356 "356" { assert ($1 == 356); $$ = $1; } -| n357 "357" { assert ($1 == 357); $$ = $1; } -| n358 "358" { assert ($1 == 358); $$ = $1; } -| n359 "359" { assert ($1 == 359); $$ = $1; } -| n360 "360" { assert ($1 == 360); $$ = $1; } -| n361 "361" { assert ($1 == 361); $$ = $1; } -| n362 "362" { assert ($1 == 362); $$ = $1; } -| n363 "363" { assert ($1 == 363); $$ = $1; } -| n364 "364" { assert ($1 == 364); $$ = $1; } -| n365 "365" { assert ($1 == 365); $$ = $1; } -| n366 "366" { assert ($1 == 366); $$ = $1; } -| n367 "367" { assert ($1 == 367); $$ = $1; } -| n368 "368" { assert ($1 == 368); $$ = $1; } -| n369 "369" { assert ($1 == 369); $$ = $1; } -| n370 "370" { assert ($1 == 
370); $$ = $1; } -| n371 "371" { assert ($1 == 371); $$ = $1; } -| n372 "372" { assert ($1 == 372); $$ = $1; } -| n373 "373" { assert ($1 == 373); $$ = $1; } -| n374 "374" { assert ($1 == 374); $$ = $1; } -| n375 "375" { assert ($1 == 375); $$ = $1; } -| n376 "376" { assert ($1 == 376); $$ = $1; } -| n377 "377" { assert ($1 == 377); $$ = $1; } -| n378 "378" { assert ($1 == 378); $$ = $1; } -| n379 "379" { assert ($1 == 379); $$ = $1; } -| n380 "380" { assert ($1 == 380); $$ = $1; } -| n381 "381" { assert ($1 == 381); $$ = $1; } -| n382 "382" { assert ($1 == 382); $$ = $1; } -| n383 "383" { assert ($1 == 383); $$ = $1; } -| n384 "384" { assert ($1 == 384); $$ = $1; } -| n385 "385" { assert ($1 == 385); $$ = $1; } -| n386 "386" { assert ($1 == 386); $$ = $1; } -| n387 "387" { assert ($1 == 387); $$ = $1; } -| n388 "388" { assert ($1 == 388); $$ = $1; } -| n389 "389" { assert ($1 == 389); $$ = $1; } -| n390 "390" { assert ($1 == 390); $$ = $1; } -| n391 "391" { assert ($1 == 391); $$ = $1; } -| n392 "392" { assert ($1 == 392); $$ = $1; } -| n393 "393" { assert ($1 == 393); $$ = $1; } -| n394 "394" { assert ($1 == 394); $$ = $1; } -| n395 "395" { assert ($1 == 395); $$ = $1; } -| n396 "396" { assert ($1 == 396); $$ = $1; } -| n397 "397" { assert ($1 == 397); $$ = $1; } -| n398 "398" { assert ($1 == 398); $$ = $1; } -| n399 "399" { assert ($1 == 399); $$ = $1; } -| n400 "400" { assert ($1 == 400); $$ = $1; } -| n401 "401" { assert ($1 == 401); $$ = $1; } -| n402 "402" { assert ($1 == 402); $$ = $1; } -| n403 "403" { assert ($1 == 403); $$ = $1; } -| n404 "404" { assert ($1 == 404); $$ = $1; } -| n405 "405" { assert ($1 == 405); $$ = $1; } -| n406 "406" { assert ($1 == 406); $$ = $1; } -| n407 "407" { assert ($1 == 407); $$ = $1; } -| n408 "408" { assert ($1 == 408); $$ = $1; } -| n409 "409" { assert ($1 == 409); $$ = $1; } -| n410 "410" { assert ($1 == 410); $$ = $1; } -| n411 "411" { assert ($1 == 411); $$ = $1; } -| n412 "412" { assert ($1 == 412); $$ = $1; } -| n413 "413" { assert ($1 == 413); $$ = $1; } -| n414 "414" { assert ($1 == 414); $$ = $1; } -| n415 "415" { assert ($1 == 415); $$ = $1; } -| n416 "416" { assert ($1 == 416); $$ = $1; } -| n417 "417" { assert ($1 == 417); $$ = $1; } -| n418 "418" { assert ($1 == 418); $$ = $1; } -| n419 "419" { assert ($1 == 419); $$ = $1; } -| n420 "420" { assert ($1 == 420); $$ = $1; } -| n421 "421" { assert ($1 == 421); $$ = $1; } -| n422 "422" { assert ($1 == 422); $$ = $1; } -| n423 "423" { assert ($1 == 423); $$ = $1; } -| n424 "424" { assert ($1 == 424); $$ = $1; } -| n425 "425" { assert ($1 == 425); $$ = $1; } -| n426 "426" { assert ($1 == 426); $$ = $1; } -| n427 "427" { assert ($1 == 427); $$ = $1; } -| n428 "428" { assert ($1 == 428); $$ = $1; } -| n429 "429" { assert ($1 == 429); $$ = $1; } -| n430 "430" { assert ($1 == 430); $$ = $1; } -| n431 "431" { assert ($1 == 431); $$ = $1; } -| n432 "432" { assert ($1 == 432); $$ = $1; } -| n433 "433" { assert ($1 == 433); $$ = $1; } -| n434 "434" { assert ($1 == 434); $$ = $1; } -| n435 "435" { assert ($1 == 435); $$ = $1; } -| n436 "436" { assert ($1 == 436); $$ = $1; } -| n437 "437" { assert ($1 == 437); $$ = $1; } -| n438 "438" { assert ($1 == 438); $$ = $1; } -| n439 "439" { assert ($1 == 439); $$ = $1; } -| n440 "440" { assert ($1 == 440); $$ = $1; } -| n441 "441" { assert ($1 == 441); $$ = $1; } -| n442 "442" { assert ($1 == 442); $$ = $1; } -| n443 "443" { assert ($1 == 443); $$ = $1; } -| n444 "444" { assert ($1 == 444); $$ = $1; } -| n445 "445" { assert ($1 == 445); $$ = $1; } -| n446 
"446" { assert ($1 == 446); $$ = $1; } -| n447 "447" { assert ($1 == 447); $$ = $1; } -| n448 "448" { assert ($1 == 448); $$ = $1; } -| n449 "449" { assert ($1 == 449); $$ = $1; } -| n450 "450" { assert ($1 == 450); $$ = $1; } -| n451 "451" { assert ($1 == 451); $$ = $1; } -| n452 "452" { assert ($1 == 452); $$ = $1; } -| n453 "453" { assert ($1 == 453); $$ = $1; } -| n454 "454" { assert ($1 == 454); $$ = $1; } -| n455 "455" { assert ($1 == 455); $$ = $1; } -| n456 "456" { assert ($1 == 456); $$ = $1; } -| n457 "457" { assert ($1 == 457); $$ = $1; } -| n458 "458" { assert ($1 == 458); $$ = $1; } -| n459 "459" { assert ($1 == 459); $$ = $1; } -| n460 "460" { assert ($1 == 460); $$ = $1; } -| n461 "461" { assert ($1 == 461); $$ = $1; } -| n462 "462" { assert ($1 == 462); $$ = $1; } -| n463 "463" { assert ($1 == 463); $$ = $1; } -| n464 "464" { assert ($1 == 464); $$ = $1; } -| n465 "465" { assert ($1 == 465); $$ = $1; } -| n466 "466" { assert ($1 == 466); $$ = $1; } -| n467 "467" { assert ($1 == 467); $$ = $1; } -| n468 "468" { assert ($1 == 468); $$ = $1; } -| n469 "469" { assert ($1 == 469); $$ = $1; } -| n470 "470" { assert ($1 == 470); $$ = $1; } -| n471 "471" { assert ($1 == 471); $$ = $1; } -| n472 "472" { assert ($1 == 472); $$ = $1; } -| n473 "473" { assert ($1 == 473); $$ = $1; } -| n474 "474" { assert ($1 == 474); $$ = $1; } -| n475 "475" { assert ($1 == 475); $$ = $1; } -| n476 "476" { assert ($1 == 476); $$ = $1; } -| n477 "477" { assert ($1 == 477); $$ = $1; } -| n478 "478" { assert ($1 == 478); $$ = $1; } -| n479 "479" { assert ($1 == 479); $$ = $1; } -| n480 "480" { assert ($1 == 480); $$ = $1; } -| n481 "481" { assert ($1 == 481); $$ = $1; } -| n482 "482" { assert ($1 == 482); $$ = $1; } -| n483 "483" { assert ($1 == 483); $$ = $1; } -| n484 "484" { assert ($1 == 484); $$ = $1; } -| n485 "485" { assert ($1 == 485); $$ = $1; } -| n486 "486" { assert ($1 == 486); $$ = $1; } -| n487 "487" { assert ($1 == 487); $$ = $1; } -| n488 "488" { assert ($1 == 488); $$ = $1; } -| n489 "489" { assert ($1 == 489); $$ = $1; } -| n490 "490" { assert ($1 == 490); $$ = $1; } -| n491 "491" { assert ($1 == 491); $$ = $1; } -| n492 "492" { assert ($1 == 492); $$ = $1; } -| n493 "493" { assert ($1 == 493); $$ = $1; } -| n494 "494" { assert ($1 == 494); $$ = $1; } -| n495 "495" { assert ($1 == 495); $$ = $1; } -| n496 "496" { assert ($1 == 496); $$ = $1; } -| n497 "497" { assert ($1 == 497); $$ = $1; } -| n498 "498" { assert ($1 == 498); $$ = $1; } -| n499 "499" { assert ($1 == 499); $$ = $1; } -| n500 "500" { assert ($1 == 500); $$ = $1; } -| n501 "501" { assert ($1 == 501); $$ = $1; } -| n502 "502" { assert ($1 == 502); $$ = $1; } -| n503 "503" { assert ($1 == 503); $$ = $1; } -| n504 "504" { assert ($1 == 504); $$ = $1; } -| n505 "505" { assert ($1 == 505); $$ = $1; } -| n506 "506" { assert ($1 == 506); $$ = $1; } -| n507 "507" { assert ($1 == 507); $$ = $1; } -| n508 "508" { assert ($1 == 508); $$ = $1; } -| n509 "509" { assert ($1 == 509); $$ = $1; } -| n510 "510" { assert ($1 == 510); $$ = $1; } -| n511 "511" { assert ($1 == 511); $$ = $1; } -| n512 "512" { assert ($1 == 512); $$ = $1; } -| n513 "513" { assert ($1 == 513); $$ = $1; } -| n514 "514" { assert ($1 == 514); $$ = $1; } -| n515 "515" { assert ($1 == 515); $$ = $1; } -| n516 "516" { assert ($1 == 516); $$ = $1; } -| n517 "517" { assert ($1 == 517); $$ = $1; } -| n518 "518" { assert ($1 == 518); $$ = $1; } -| n519 "519" { assert ($1 == 519); $$ = $1; } -| n520 "520" { assert ($1 == 520); $$ = $1; } -| n521 "521" { assert ($1 == 521); 
$$ = $1; } -| n522 "522" { assert ($1 == 522); $$ = $1; } -| n523 "523" { assert ($1 == 523); $$ = $1; } -| n524 "524" { assert ($1 == 524); $$ = $1; } -| n525 "525" { assert ($1 == 525); $$ = $1; } -| n526 "526" { assert ($1 == 526); $$ = $1; } -| n527 "527" { assert ($1 == 527); $$ = $1; } -| n528 "528" { assert ($1 == 528); $$ = $1; } -| n529 "529" { assert ($1 == 529); $$ = $1; } -| n530 "530" { assert ($1 == 530); $$ = $1; } -| n531 "531" { assert ($1 == 531); $$ = $1; } -| n532 "532" { assert ($1 == 532); $$ = $1; } -| n533 "533" { assert ($1 == 533); $$ = $1; } -| n534 "534" { assert ($1 == 534); $$ = $1; } -| n535 "535" { assert ($1 == 535); $$ = $1; } -| n536 "536" { assert ($1 == 536); $$ = $1; } -| n537 "537" { assert ($1 == 537); $$ = $1; } -| n538 "538" { assert ($1 == 538); $$ = $1; } -| n539 "539" { assert ($1 == 539); $$ = $1; } -| n540 "540" { assert ($1 == 540); $$ = $1; } -| n541 "541" { assert ($1 == 541); $$ = $1; } -| n542 "542" { assert ($1 == 542); $$ = $1; } -| n543 "543" { assert ($1 == 543); $$ = $1; } -| n544 "544" { assert ($1 == 544); $$ = $1; } -| n545 "545" { assert ($1 == 545); $$ = $1; } -| n546 "546" { assert ($1 == 546); $$ = $1; } -| n547 "547" { assert ($1 == 547); $$ = $1; } -| n548 "548" { assert ($1 == 548); $$ = $1; } -| n549 "549" { assert ($1 == 549); $$ = $1; } -| n550 "550" { assert ($1 == 550); $$ = $1; } -| n551 "551" { assert ($1 == 551); $$ = $1; } -| n552 "552" { assert ($1 == 552); $$ = $1; } -| n553 "553" { assert ($1 == 553); $$ = $1; } -| n554 "554" { assert ($1 == 554); $$ = $1; } -| n555 "555" { assert ($1 == 555); $$ = $1; } -| n556 "556" { assert ($1 == 556); $$ = $1; } -| n557 "557" { assert ($1 == 557); $$ = $1; } -| n558 "558" { assert ($1 == 558); $$ = $1; } -| n559 "559" { assert ($1 == 559); $$ = $1; } -| n560 "560" { assert ($1 == 560); $$ = $1; } -| n561 "561" { assert ($1 == 561); $$ = $1; } -| n562 "562" { assert ($1 == 562); $$ = $1; } -| n563 "563" { assert ($1 == 563); $$ = $1; } -| n564 "564" { assert ($1 == 564); $$ = $1; } -| n565 "565" { assert ($1 == 565); $$ = $1; } -| n566 "566" { assert ($1 == 566); $$ = $1; } -| n567 "567" { assert ($1 == 567); $$ = $1; } -| n568 "568" { assert ($1 == 568); $$ = $1; } -| n569 "569" { assert ($1 == 569); $$ = $1; } -| n570 "570" { assert ($1 == 570); $$ = $1; } -| n571 "571" { assert ($1 == 571); $$ = $1; } -| n572 "572" { assert ($1 == 572); $$ = $1; } -| n573 "573" { assert ($1 == 573); $$ = $1; } -| n574 "574" { assert ($1 == 574); $$ = $1; } -| n575 "575" { assert ($1 == 575); $$ = $1; } -| n576 "576" { assert ($1 == 576); $$ = $1; } -| n577 "577" { assert ($1 == 577); $$ = $1; } -| n578 "578" { assert ($1 == 578); $$ = $1; } -| n579 "579" { assert ($1 == 579); $$ = $1; } -| n580 "580" { assert ($1 == 580); $$ = $1; } -| n581 "581" { assert ($1 == 581); $$ = $1; } -| n582 "582" { assert ($1 == 582); $$ = $1; } -| n583 "583" { assert ($1 == 583); $$ = $1; } -| n584 "584" { assert ($1 == 584); $$ = $1; } -| n585 "585" { assert ($1 == 585); $$ = $1; } -| n586 "586" { assert ($1 == 586); $$ = $1; } -| n587 "587" { assert ($1 == 587); $$ = $1; } -| n588 "588" { assert ($1 == 588); $$ = $1; } -| n589 "589" { assert ($1 == 589); $$ = $1; } -| n590 "590" { assert ($1 == 590); $$ = $1; } -| n591 "591" { assert ($1 == 591); $$ = $1; } -| n592 "592" { assert ($1 == 592); $$ = $1; } -| n593 "593" { assert ($1 == 593); $$ = $1; } -| n594 "594" { assert ($1 == 594); $$ = $1; } -| n595 "595" { assert ($1 == 595); $$ = $1; } -| n596 "596" { assert ($1 == 596); $$ = $1; } -| n597 "597" { 
assert ($1 == 597); $$ = $1; } -| n598 "598" { assert ($1 == 598); $$ = $1; } -| n599 "599" { assert ($1 == 599); $$ = $1; } -| n600 "600" { assert ($1 == 600); $$ = $1; } -| n601 "601" { assert ($1 == 601); $$ = $1; } -| n602 "602" { assert ($1 == 602); $$ = $1; } -| n603 "603" { assert ($1 == 603); $$ = $1; } -| n604 "604" { assert ($1 == 604); $$ = $1; } -| n605 "605" { assert ($1 == 605); $$ = $1; } -| n606 "606" { assert ($1 == 606); $$ = $1; } -| n607 "607" { assert ($1 == 607); $$ = $1; } -| n608 "608" { assert ($1 == 608); $$ = $1; } -| n609 "609" { assert ($1 == 609); $$ = $1; } -| n610 "610" { assert ($1 == 610); $$ = $1; } -| n611 "611" { assert ($1 == 611); $$ = $1; } -| n612 "612" { assert ($1 == 612); $$ = $1; } -| n613 "613" { assert ($1 == 613); $$ = $1; } -| n614 "614" { assert ($1 == 614); $$ = $1; } -| n615 "615" { assert ($1 == 615); $$ = $1; } -| n616 "616" { assert ($1 == 616); $$ = $1; } -| n617 "617" { assert ($1 == 617); $$ = $1; } -| n618 "618" { assert ($1 == 618); $$ = $1; } -| n619 "619" { assert ($1 == 619); $$ = $1; } -| n620 "620" { assert ($1 == 620); $$ = $1; } -| n621 "621" { assert ($1 == 621); $$ = $1; } -| n622 "622" { assert ($1 == 622); $$ = $1; } -| n623 "623" { assert ($1 == 623); $$ = $1; } -| n624 "624" { assert ($1 == 624); $$ = $1; } -| n625 "625" { assert ($1 == 625); $$ = $1; } -| n626 "626" { assert ($1 == 626); $$ = $1; } -| n627 "627" { assert ($1 == 627); $$ = $1; } -| n628 "628" { assert ($1 == 628); $$ = $1; } -| n629 "629" { assert ($1 == 629); $$ = $1; } -| n630 "630" { assert ($1 == 630); $$ = $1; } -| n631 "631" { assert ($1 == 631); $$ = $1; } -| n632 "632" { assert ($1 == 632); $$ = $1; } -| n633 "633" { assert ($1 == 633); $$ = $1; } -| n634 "634" { assert ($1 == 634); $$ = $1; } -| n635 "635" { assert ($1 == 635); $$ = $1; } -| n636 "636" { assert ($1 == 636); $$ = $1; } -| n637 "637" { assert ($1 == 637); $$ = $1; } -| n638 "638" { assert ($1 == 638); $$ = $1; } -| n639 "639" { assert ($1 == 639); $$ = $1; } -| n640 "640" { assert ($1 == 640); $$ = $1; } -| n641 "641" { assert ($1 == 641); $$ = $1; } -| n642 "642" { assert ($1 == 642); $$ = $1; } -| n643 "643" { assert ($1 == 643); $$ = $1; } -| n644 "644" { assert ($1 == 644); $$ = $1; } -| n645 "645" { assert ($1 == 645); $$ = $1; } -| n646 "646" { assert ($1 == 646); $$ = $1; } -| n647 "647" { assert ($1 == 647); $$ = $1; } -| n648 "648" { assert ($1 == 648); $$ = $1; } -| n649 "649" { assert ($1 == 649); $$ = $1; } -| n650 "650" { assert ($1 == 650); $$ = $1; } -| n651 "651" { assert ($1 == 651); $$ = $1; } -| n652 "652" { assert ($1 == 652); $$ = $1; } -| n653 "653" { assert ($1 == 653); $$ = $1; } -| n654 "654" { assert ($1 == 654); $$ = $1; } -| n655 "655" { assert ($1 == 655); $$ = $1; } -| n656 "656" { assert ($1 == 656); $$ = $1; } -| n657 "657" { assert ($1 == 657); $$ = $1; } -| n658 "658" { assert ($1 == 658); $$ = $1; } -| n659 "659" { assert ($1 == 659); $$ = $1; } -| n660 "660" { assert ($1 == 660); $$ = $1; } -| n661 "661" { assert ($1 == 661); $$ = $1; } -| n662 "662" { assert ($1 == 662); $$ = $1; } -| n663 "663" { assert ($1 == 663); $$ = $1; } -| n664 "664" { assert ($1 == 664); $$ = $1; } -| n665 "665" { assert ($1 == 665); $$ = $1; } -| n666 "666" { assert ($1 == 666); $$ = $1; } -| n667 "667" { assert ($1 == 667); $$ = $1; } -| n668 "668" { assert ($1 == 668); $$ = $1; } -| n669 "669" { assert ($1 == 669); $$ = $1; } -| n670 "670" { assert ($1 == 670); $$ = $1; } -| n671 "671" { assert ($1 == 671); $$ = $1; } -| n672 "672" { assert ($1 == 672); $$ = $1; 
} -| n673 "673" { assert ($1 == 673); $$ = $1; } -| n674 "674" { assert ($1 == 674); $$ = $1; } -| n675 "675" { assert ($1 == 675); $$ = $1; } -| n676 "676" { assert ($1 == 676); $$ = $1; } -| n677 "677" { assert ($1 == 677); $$ = $1; } -| n678 "678" { assert ($1 == 678); $$ = $1; } -| n679 "679" { assert ($1 == 679); $$ = $1; } -| n680 "680" { assert ($1 == 680); $$ = $1; } -| n681 "681" { assert ($1 == 681); $$ = $1; } -| n682 "682" { assert ($1 == 682); $$ = $1; } -| n683 "683" { assert ($1 == 683); $$ = $1; } -| n684 "684" { assert ($1 == 684); $$ = $1; } -| n685 "685" { assert ($1 == 685); $$ = $1; } -| n686 "686" { assert ($1 == 686); $$ = $1; } -| n687 "687" { assert ($1 == 687); $$ = $1; } -| n688 "688" { assert ($1 == 688); $$ = $1; } -| n689 "689" { assert ($1 == 689); $$ = $1; } -| n690 "690" { assert ($1 == 690); $$ = $1; } -| n691 "691" { assert ($1 == 691); $$ = $1; } -| n692 "692" { assert ($1 == 692); $$ = $1; } -| n693 "693" { assert ($1 == 693); $$ = $1; } -| n694 "694" { assert ($1 == 694); $$ = $1; } -| n695 "695" { assert ($1 == 695); $$ = $1; } -| n696 "696" { assert ($1 == 696); $$ = $1; } -| n697 "697" { assert ($1 == 697); $$ = $1; } -| n698 "698" { assert ($1 == 698); $$ = $1; } -| n699 "699" { assert ($1 == 699); $$ = $1; } -| n700 "700" { assert ($1 == 700); $$ = $1; } -| n701 "701" { assert ($1 == 701); $$ = $1; } -| n702 "702" { assert ($1 == 702); $$ = $1; } -| n703 "703" { assert ($1 == 703); $$ = $1; } -| n704 "704" { assert ($1 == 704); $$ = $1; } -| n705 "705" { assert ($1 == 705); $$ = $1; } -| n706 "706" { assert ($1 == 706); $$ = $1; } -| n707 "707" { assert ($1 == 707); $$ = $1; } -| n708 "708" { assert ($1 == 708); $$ = $1; } -| n709 "709" { assert ($1 == 709); $$ = $1; } -| n710 "710" { assert ($1 == 710); $$ = $1; } -| n711 "711" { assert ($1 == 711); $$ = $1; } -| n712 "712" { assert ($1 == 712); $$ = $1; } -| n713 "713" { assert ($1 == 713); $$ = $1; } -| n714 "714" { assert ($1 == 714); $$ = $1; } -| n715 "715" { assert ($1 == 715); $$ = $1; } -| n716 "716" { assert ($1 == 716); $$ = $1; } -| n717 "717" { assert ($1 == 717); $$ = $1; } -| n718 "718" { assert ($1 == 718); $$ = $1; } -| n719 "719" { assert ($1 == 719); $$ = $1; } -| n720 "720" { assert ($1 == 720); $$ = $1; } -| n721 "721" { assert ($1 == 721); $$ = $1; } -| n722 "722" { assert ($1 == 722); $$ = $1; } -| n723 "723" { assert ($1 == 723); $$ = $1; } -| n724 "724" { assert ($1 == 724); $$ = $1; } -| n725 "725" { assert ($1 == 725); $$ = $1; } -| n726 "726" { assert ($1 == 726); $$ = $1; } -| n727 "727" { assert ($1 == 727); $$ = $1; } -| n728 "728" { assert ($1 == 728); $$ = $1; } -| n729 "729" { assert ($1 == 729); $$ = $1; } -| n730 "730" { assert ($1 == 730); $$ = $1; } -| n731 "731" { assert ($1 == 731); $$ = $1; } -| n732 "732" { assert ($1 == 732); $$ = $1; } -| n733 "733" { assert ($1 == 733); $$ = $1; } -| n734 "734" { assert ($1 == 734); $$ = $1; } -| n735 "735" { assert ($1 == 735); $$ = $1; } -| n736 "736" { assert ($1 == 736); $$ = $1; } -| n737 "737" { assert ($1 == 737); $$ = $1; } -| n738 "738" { assert ($1 == 738); $$ = $1; } -| n739 "739" { assert ($1 == 739); $$ = $1; } -| n740 "740" { assert ($1 == 740); $$ = $1; } -| n741 "741" { assert ($1 == 741); $$ = $1; } -| n742 "742" { assert ($1 == 742); $$ = $1; } -| n743 "743" { assert ($1 == 743); $$ = $1; } -| n744 "744" { assert ($1 == 744); $$ = $1; } -| n745 "745" { assert ($1 == 745); $$ = $1; } -| n746 "746" { assert ($1 == 746); $$ = $1; } -| n747 "747" { assert ($1 == 747); $$ = $1; } -| n748 "748" { assert ($1 
== 748); $$ = $1; } -| n749 "749" { assert ($1 == 749); $$ = $1; } -| n750 "750" { assert ($1 == 750); $$ = $1; } -| n751 "751" { assert ($1 == 751); $$ = $1; } -| n752 "752" { assert ($1 == 752); $$ = $1; } -| n753 "753" { assert ($1 == 753); $$ = $1; } -| n754 "754" { assert ($1 == 754); $$ = $1; } -| n755 "755" { assert ($1 == 755); $$ = $1; } -| n756 "756" { assert ($1 == 756); $$ = $1; } -| n757 "757" { assert ($1 == 757); $$ = $1; } -| n758 "758" { assert ($1 == 758); $$ = $1; } -| n759 "759" { assert ($1 == 759); $$ = $1; } -| n760 "760" { assert ($1 == 760); $$ = $1; } -| n761 "761" { assert ($1 == 761); $$ = $1; } -| n762 "762" { assert ($1 == 762); $$ = $1; } -| n763 "763" { assert ($1 == 763); $$ = $1; } -| n764 "764" { assert ($1 == 764); $$ = $1; } -| n765 "765" { assert ($1 == 765); $$ = $1; } -| n766 "766" { assert ($1 == 766); $$ = $1; } -| n767 "767" { assert ($1 == 767); $$ = $1; } -| n768 "768" { assert ($1 == 768); $$ = $1; } -| n769 "769" { assert ($1 == 769); $$ = $1; } -| n770 "770" { assert ($1 == 770); $$ = $1; } -| n771 "771" { assert ($1 == 771); $$ = $1; } -| n772 "772" { assert ($1 == 772); $$ = $1; } -| n773 "773" { assert ($1 == 773); $$ = $1; } -| n774 "774" { assert ($1 == 774); $$ = $1; } -| n775 "775" { assert ($1 == 775); $$ = $1; } -| n776 "776" { assert ($1 == 776); $$ = $1; } -| n777 "777" { assert ($1 == 777); $$ = $1; } -| n778 "778" { assert ($1 == 778); $$ = $1; } -| n779 "779" { assert ($1 == 779); $$ = $1; } -| n780 "780" { assert ($1 == 780); $$ = $1; } -| n781 "781" { assert ($1 == 781); $$ = $1; } -| n782 "782" { assert ($1 == 782); $$ = $1; } -| n783 "783" { assert ($1 == 783); $$ = $1; } -| n784 "784" { assert ($1 == 784); $$ = $1; } -| n785 "785" { assert ($1 == 785); $$ = $1; } -| n786 "786" { assert ($1 == 786); $$ = $1; } -| n787 "787" { assert ($1 == 787); $$ = $1; } -| n788 "788" { assert ($1 == 788); $$ = $1; } -| n789 "789" { assert ($1 == 789); $$ = $1; } -| n790 "790" { assert ($1 == 790); $$ = $1; } -| n791 "791" { assert ($1 == 791); $$ = $1; } -| n792 "792" { assert ($1 == 792); $$ = $1; } -| n793 "793" { assert ($1 == 793); $$ = $1; } -| n794 "794" { assert ($1 == 794); $$ = $1; } -| n795 "795" { assert ($1 == 795); $$ = $1; } -| n796 "796" { assert ($1 == 796); $$ = $1; } -| n797 "797" { assert ($1 == 797); $$ = $1; } -| n798 "798" { assert ($1 == 798); $$ = $1; } -| n799 "799" { assert ($1 == 799); $$ = $1; } -| n800 "800" { assert ($1 == 800); $$ = $1; } -| n801 "801" { assert ($1 == 801); $$ = $1; } -| n802 "802" { assert ($1 == 802); $$ = $1; } -| n803 "803" { assert ($1 == 803); $$ = $1; } -| n804 "804" { assert ($1 == 804); $$ = $1; } -| n805 "805" { assert ($1 == 805); $$ = $1; } -| n806 "806" { assert ($1 == 806); $$ = $1; } -| n807 "807" { assert ($1 == 807); $$ = $1; } -| n808 "808" { assert ($1 == 808); $$ = $1; } -| n809 "809" { assert ($1 == 809); $$ = $1; } -| n810 "810" { assert ($1 == 810); $$ = $1; } -| n811 "811" { assert ($1 == 811); $$ = $1; } -| n812 "812" { assert ($1 == 812); $$ = $1; } -| n813 "813" { assert ($1 == 813); $$ = $1; } -| n814 "814" { assert ($1 == 814); $$ = $1; } -| n815 "815" { assert ($1 == 815); $$ = $1; } -| n816 "816" { assert ($1 == 816); $$ = $1; } -| n817 "817" { assert ($1 == 817); $$ = $1; } -| n818 "818" { assert ($1 == 818); $$ = $1; } -| n819 "819" { assert ($1 == 819); $$ = $1; } -| n820 "820" { assert ($1 == 820); $$ = $1; } -| n821 "821" { assert ($1 == 821); $$ = $1; } -| n822 "822" { assert ($1 == 822); $$ = $1; } -| n823 "823" { assert ($1 == 823); $$ = $1; } -| n824 
"824" { assert ($1 == 824); $$ = $1; } -| n825 "825" { assert ($1 == 825); $$ = $1; } -| n826 "826" { assert ($1 == 826); $$ = $1; } -| n827 "827" { assert ($1 == 827); $$ = $1; } -| n828 "828" { assert ($1 == 828); $$ = $1; } -| n829 "829" { assert ($1 == 829); $$ = $1; } -| n830 "830" { assert ($1 == 830); $$ = $1; } -| n831 "831" { assert ($1 == 831); $$ = $1; } -| n832 "832" { assert ($1 == 832); $$ = $1; } -| n833 "833" { assert ($1 == 833); $$ = $1; } -| n834 "834" { assert ($1 == 834); $$ = $1; } -| n835 "835" { assert ($1 == 835); $$ = $1; } -| n836 "836" { assert ($1 == 836); $$ = $1; } -| n837 "837" { assert ($1 == 837); $$ = $1; } -| n838 "838" { assert ($1 == 838); $$ = $1; } -| n839 "839" { assert ($1 == 839); $$ = $1; } -| n840 "840" { assert ($1 == 840); $$ = $1; } -| n841 "841" { assert ($1 == 841); $$ = $1; } -| n842 "842" { assert ($1 == 842); $$ = $1; } -| n843 "843" { assert ($1 == 843); $$ = $1; } -| n844 "844" { assert ($1 == 844); $$ = $1; } -| n845 "845" { assert ($1 == 845); $$ = $1; } -| n846 "846" { assert ($1 == 846); $$ = $1; } -| n847 "847" { assert ($1 == 847); $$ = $1; } -| n848 "848" { assert ($1 == 848); $$ = $1; } -| n849 "849" { assert ($1 == 849); $$ = $1; } -| n850 "850" { assert ($1 == 850); $$ = $1; } -| n851 "851" { assert ($1 == 851); $$ = $1; } -| n852 "852" { assert ($1 == 852); $$ = $1; } -| n853 "853" { assert ($1 == 853); $$ = $1; } -| n854 "854" { assert ($1 == 854); $$ = $1; } -| n855 "855" { assert ($1 == 855); $$ = $1; } -| n856 "856" { assert ($1 == 856); $$ = $1; } -| n857 "857" { assert ($1 == 857); $$ = $1; } -| n858 "858" { assert ($1 == 858); $$ = $1; } -| n859 "859" { assert ($1 == 859); $$ = $1; } -| n860 "860" { assert ($1 == 860); $$ = $1; } -| n861 "861" { assert ($1 == 861); $$ = $1; } -| n862 "862" { assert ($1 == 862); $$ = $1; } -| n863 "863" { assert ($1 == 863); $$ = $1; } -| n864 "864" { assert ($1 == 864); $$ = $1; } -| n865 "865" { assert ($1 == 865); $$ = $1; } -| n866 "866" { assert ($1 == 866); $$ = $1; } -| n867 "867" { assert ($1 == 867); $$ = $1; } -| n868 "868" { assert ($1 == 868); $$ = $1; } -| n869 "869" { assert ($1 == 869); $$ = $1; } -| n870 "870" { assert ($1 == 870); $$ = $1; } -| n871 "871" { assert ($1 == 871); $$ = $1; } -| n872 "872" { assert ($1 == 872); $$ = $1; } -| n873 "873" { assert ($1 == 873); $$ = $1; } -| n874 "874" { assert ($1 == 874); $$ = $1; } -| n875 "875" { assert ($1 == 875); $$ = $1; } -| n876 "876" { assert ($1 == 876); $$ = $1; } -| n877 "877" { assert ($1 == 877); $$ = $1; } -| n878 "878" { assert ($1 == 878); $$ = $1; } -| n879 "879" { assert ($1 == 879); $$ = $1; } -| n880 "880" { assert ($1 == 880); $$ = $1; } -| n881 "881" { assert ($1 == 881); $$ = $1; } -| n882 "882" { assert ($1 == 882); $$ = $1; } -| n883 "883" { assert ($1 == 883); $$ = $1; } -| n884 "884" { assert ($1 == 884); $$ = $1; } -| n885 "885" { assert ($1 == 885); $$ = $1; } -| n886 "886" { assert ($1 == 886); $$ = $1; } -| n887 "887" { assert ($1 == 887); $$ = $1; } -| n888 "888" { assert ($1 == 888); $$ = $1; } -| n889 "889" { assert ($1 == 889); $$ = $1; } -| n890 "890" { assert ($1 == 890); $$ = $1; } -| n891 "891" { assert ($1 == 891); $$ = $1; } -| n892 "892" { assert ($1 == 892); $$ = $1; } -| n893 "893" { assert ($1 == 893); $$ = $1; } -| n894 "894" { assert ($1 == 894); $$ = $1; } -| n895 "895" { assert ($1 == 895); $$ = $1; } -| n896 "896" { assert ($1 == 896); $$ = $1; } -| n897 "897" { assert ($1 == 897); $$ = $1; } -| n898 "898" { assert ($1 == 898); $$ = $1; } -| n899 "899" { assert ($1 == 899); 
$$ = $1; } -| n900 "900" { assert ($1 == 900); $$ = $1; } -| n901 "901" { assert ($1 == 901); $$ = $1; } -| n902 "902" { assert ($1 == 902); $$ = $1; } -| n903 "903" { assert ($1 == 903); $$ = $1; } -| n904 "904" { assert ($1 == 904); $$ = $1; } -| n905 "905" { assert ($1 == 905); $$ = $1; } -| n906 "906" { assert ($1 == 906); $$ = $1; } -| n907 "907" { assert ($1 == 907); $$ = $1; } -| n908 "908" { assert ($1 == 908); $$ = $1; } -| n909 "909" { assert ($1 == 909); $$ = $1; } -| n910 "910" { assert ($1 == 910); $$ = $1; } -| n911 "911" { assert ($1 == 911); $$ = $1; } -| n912 "912" { assert ($1 == 912); $$ = $1; } -| n913 "913" { assert ($1 == 913); $$ = $1; } -| n914 "914" { assert ($1 == 914); $$ = $1; } -| n915 "915" { assert ($1 == 915); $$ = $1; } -| n916 "916" { assert ($1 == 916); $$ = $1; } -| n917 "917" { assert ($1 == 917); $$ = $1; } -| n918 "918" { assert ($1 == 918); $$ = $1; } -| n919 "919" { assert ($1 == 919); $$ = $1; } -| n920 "920" { assert ($1 == 920); $$ = $1; } -| n921 "921" { assert ($1 == 921); $$ = $1; } -| n922 "922" { assert ($1 == 922); $$ = $1; } -| n923 "923" { assert ($1 == 923); $$ = $1; } -| n924 "924" { assert ($1 == 924); $$ = $1; } -| n925 "925" { assert ($1 == 925); $$ = $1; } -| n926 "926" { assert ($1 == 926); $$ = $1; } -| n927 "927" { assert ($1 == 927); $$ = $1; } -| n928 "928" { assert ($1 == 928); $$ = $1; } -| n929 "929" { assert ($1 == 929); $$ = $1; } -| n930 "930" { assert ($1 == 930); $$ = $1; } -| n931 "931" { assert ($1 == 931); $$ = $1; } -| n932 "932" { assert ($1 == 932); $$ = $1; } -| n933 "933" { assert ($1 == 933); $$ = $1; } -| n934 "934" { assert ($1 == 934); $$ = $1; } -| n935 "935" { assert ($1 == 935); $$ = $1; } -| n936 "936" { assert ($1 == 936); $$ = $1; } -| n937 "937" { assert ($1 == 937); $$ = $1; } -| n938 "938" { assert ($1 == 938); $$ = $1; } -| n939 "939" { assert ($1 == 939); $$ = $1; } -| n940 "940" { assert ($1 == 940); $$ = $1; } -| n941 "941" { assert ($1 == 941); $$ = $1; } -| n942 "942" { assert ($1 == 942); $$ = $1; } -| n943 "943" { assert ($1 == 943); $$ = $1; } -| n944 "944" { assert ($1 == 944); $$ = $1; } -| n945 "945" { assert ($1 == 945); $$ = $1; } -| n946 "946" { assert ($1 == 946); $$ = $1; } -| n947 "947" { assert ($1 == 947); $$ = $1; } -| n948 "948" { assert ($1 == 948); $$ = $1; } -| n949 "949" { assert ($1 == 949); $$ = $1; } -| n950 "950" { assert ($1 == 950); $$ = $1; } -| n951 "951" { assert ($1 == 951); $$ = $1; } -| n952 "952" { assert ($1 == 952); $$ = $1; } -| n953 "953" { assert ($1 == 953); $$ = $1; } -| n954 "954" { assert ($1 == 954); $$ = $1; } -| n955 "955" { assert ($1 == 955); $$ = $1; } -| n956 "956" { assert ($1 == 956); $$ = $1; } -| n957 "957" { assert ($1 == 957); $$ = $1; } -| n958 "958" { assert ($1 == 958); $$ = $1; } -| n959 "959" { assert ($1 == 959); $$ = $1; } -| n960 "960" { assert ($1 == 960); $$ = $1; } -| n961 "961" { assert ($1 == 961); $$ = $1; } -| n962 "962" { assert ($1 == 962); $$ = $1; } -| n963 "963" { assert ($1 == 963); $$ = $1; } -| n964 "964" { assert ($1 == 964); $$ = $1; } -| n965 "965" { assert ($1 == 965); $$ = $1; } -| n966 "966" { assert ($1 == 966); $$ = $1; } -| n967 "967" { assert ($1 == 967); $$ = $1; } -| n968 "968" { assert ($1 == 968); $$ = $1; } -| n969 "969" { assert ($1 == 969); $$ = $1; } -| n970 "970" { assert ($1 == 970); $$ = $1; } -| n971 "971" { assert ($1 == 971); $$ = $1; } -| n972 "972" { assert ($1 == 972); $$ = $1; } -| n973 "973" { assert ($1 == 973); $$ = $1; } -| n974 "974" { assert ($1 == 974); $$ = $1; } -| n975 "975" { 
assert ($1 == 975); $$ = $1; } -| n976 "976" { assert ($1 == 976); $$ = $1; } -| n977 "977" { assert ($1 == 977); $$ = $1; } -| n978 "978" { assert ($1 == 978); $$ = $1; } -| n979 "979" { assert ($1 == 979); $$ = $1; } -| n980 "980" { assert ($1 == 980); $$ = $1; } -| n981 "981" { assert ($1 == 981); $$ = $1; } -| n982 "982" { assert ($1 == 982); $$ = $1; } -| n983 "983" { assert ($1 == 983); $$ = $1; } -| n984 "984" { assert ($1 == 984); $$ = $1; } -| n985 "985" { assert ($1 == 985); $$ = $1; } -| n986 "986" { assert ($1 == 986); $$ = $1; } -| n987 "987" { assert ($1 == 987); $$ = $1; } -| n988 "988" { assert ($1 == 988); $$ = $1; } -| n989 "989" { assert ($1 == 989); $$ = $1; } -| n990 "990" { assert ($1 == 990); $$ = $1; } -| n991 "991" { assert ($1 == 991); $$ = $1; } -| n992 "992" { assert ($1 == 992); $$ = $1; } -| n993 "993" { assert ($1 == 993); $$ = $1; } -| n994 "994" { assert ($1 == 994); $$ = $1; } -| n995 "995" { assert ($1 == 995); $$ = $1; } -| n996 "996" { assert ($1 == 996); $$ = $1; } -| n997 "997" { assert ($1 == 997); $$ = $1; } -| n998 "998" { assert ($1 == 998); $$ = $1; } -| n999 "999" { assert ($1 == 999); $$ = $1; } -| n1000 "1000" { assert ($1 == 1000); $$ = $1; } -; -n1: token { $$ = 1; }; -n2: token { $$ = 2; }; -n3: token { $$ = 3; }; -n4: token { $$ = 4; }; -n5: token { $$ = 5; }; -n6: token { $$ = 6; }; -n7: token { $$ = 7; }; -n8: token { $$ = 8; }; -n9: token { $$ = 9; }; -n10: token { $$ = 10; }; -n11: token { $$ = 11; }; -n12: token { $$ = 12; }; -n13: token { $$ = 13; }; -n14: token { $$ = 14; }; -n15: token { $$ = 15; }; -n16: token { $$ = 16; }; -n17: token { $$ = 17; }; -n18: token { $$ = 18; }; -n19: token { $$ = 19; }; -n20: token { $$ = 20; }; -n21: token { $$ = 21; }; -n22: token { $$ = 22; }; -n23: token { $$ = 23; }; -n24: token { $$ = 24; }; -n25: token { $$ = 25; }; -n26: token { $$ = 26; }; -n27: token { $$ = 27; }; -n28: token { $$ = 28; }; -n29: token { $$ = 29; }; -n30: token { $$ = 30; }; -n31: token { $$ = 31; }; -n32: token { $$ = 32; }; -n33: token { $$ = 33; }; -n34: token { $$ = 34; }; -n35: token { $$ = 35; }; -n36: token { $$ = 36; }; -n37: token { $$ = 37; }; -n38: token { $$ = 38; }; -n39: token { $$ = 39; }; -n40: token { $$ = 40; }; -n41: token { $$ = 41; }; -n42: token { $$ = 42; }; -n43: token { $$ = 43; }; -n44: token { $$ = 44; }; -n45: token { $$ = 45; }; -n46: token { $$ = 46; }; -n47: token { $$ = 47; }; -n48: token { $$ = 48; }; -n49: token { $$ = 49; }; -n50: token { $$ = 50; }; -n51: token { $$ = 51; }; -n52: token { $$ = 52; }; -n53: token { $$ = 53; }; -n54: token { $$ = 54; }; -n55: token { $$ = 55; }; -n56: token { $$ = 56; }; -n57: token { $$ = 57; }; -n58: token { $$ = 58; }; -n59: token { $$ = 59; }; -n60: token { $$ = 60; }; -n61: token { $$ = 61; }; -n62: token { $$ = 62; }; -n63: token { $$ = 63; }; -n64: token { $$ = 64; }; -n65: token { $$ = 65; }; -n66: token { $$ = 66; }; -n67: token { $$ = 67; }; -n68: token { $$ = 68; }; -n69: token { $$ = 69; }; -n70: token { $$ = 70; }; -n71: token { $$ = 71; }; -n72: token { $$ = 72; }; -n73: token { $$ = 73; }; -n74: token { $$ = 74; }; -n75: token { $$ = 75; }; -n76: token { $$ = 76; }; -n77: token { $$ = 77; }; -n78: token { $$ = 78; }; -n79: token { $$ = 79; }; -n80: token { $$ = 80; }; -n81: token { $$ = 81; }; -n82: token { $$ = 82; }; -n83: token { $$ = 83; }; -n84: token { $$ = 84; }; -n85: token { $$ = 85; }; -n86: token { $$ = 86; }; -n87: token { $$ = 87; }; -n88: token { $$ = 88; }; -n89: token { $$ = 89; }; -n90: token { $$ = 90; }; -n91: token { $$ = 
91; }; -n92: token { $$ = 92; }; -n93: token { $$ = 93; }; -n94: token { $$ = 94; }; -n95: token { $$ = 95; }; -n96: token { $$ = 96; }; -n97: token { $$ = 97; }; -n98: token { $$ = 98; }; -n99: token { $$ = 99; }; -n100: token { $$ = 100; }; -n101: token { $$ = 101; }; -n102: token { $$ = 102; }; -n103: token { $$ = 103; }; -n104: token { $$ = 104; }; -n105: token { $$ = 105; }; -n106: token { $$ = 106; }; -n107: token { $$ = 107; }; -n108: token { $$ = 108; }; -n109: token { $$ = 109; }; -n110: token { $$ = 110; }; -n111: token { $$ = 111; }; -n112: token { $$ = 112; }; -n113: token { $$ = 113; }; -n114: token { $$ = 114; }; -n115: token { $$ = 115; }; -n116: token { $$ = 116; }; -n117: token { $$ = 117; }; -n118: token { $$ = 118; }; -n119: token { $$ = 119; }; -n120: token { $$ = 120; }; -n121: token { $$ = 121; }; -n122: token { $$ = 122; }; -n123: token { $$ = 123; }; -n124: token { $$ = 124; }; -n125: token { $$ = 125; }; -n126: token { $$ = 126; }; -n127: token { $$ = 127; }; -n128: token { $$ = 128; }; -n129: token { $$ = 129; }; -n130: token { $$ = 130; }; -n131: token { $$ = 131; }; -n132: token { $$ = 132; }; -n133: token { $$ = 133; }; -n134: token { $$ = 134; }; -n135: token { $$ = 135; }; -n136: token { $$ = 136; }; -n137: token { $$ = 137; }; -n138: token { $$ = 138; }; -n139: token { $$ = 139; }; -n140: token { $$ = 140; }; -n141: token { $$ = 141; }; -n142: token { $$ = 142; }; -n143: token { $$ = 143; }; -n144: token { $$ = 144; }; -n145: token { $$ = 145; }; -n146: token { $$ = 146; }; -n147: token { $$ = 147; }; -n148: token { $$ = 148; }; -n149: token { $$ = 149; }; -n150: token { $$ = 150; }; -n151: token { $$ = 151; }; -n152: token { $$ = 152; }; -n153: token { $$ = 153; }; -n154: token { $$ = 154; }; -n155: token { $$ = 155; }; -n156: token { $$ = 156; }; -n157: token { $$ = 157; }; -n158: token { $$ = 158; }; -n159: token { $$ = 159; }; -n160: token { $$ = 160; }; -n161: token { $$ = 161; }; -n162: token { $$ = 162; }; -n163: token { $$ = 163; }; -n164: token { $$ = 164; }; -n165: token { $$ = 165; }; -n166: token { $$ = 166; }; -n167: token { $$ = 167; }; -n168: token { $$ = 168; }; -n169: token { $$ = 169; }; -n170: token { $$ = 170; }; -n171: token { $$ = 171; }; -n172: token { $$ = 172; }; -n173: token { $$ = 173; }; -n174: token { $$ = 174; }; -n175: token { $$ = 175; }; -n176: token { $$ = 176; }; -n177: token { $$ = 177; }; -n178: token { $$ = 178; }; -n179: token { $$ = 179; }; -n180: token { $$ = 180; }; -n181: token { $$ = 181; }; -n182: token { $$ = 182; }; -n183: token { $$ = 183; }; -n184: token { $$ = 184; }; -n185: token { $$ = 185; }; -n186: token { $$ = 186; }; -n187: token { $$ = 187; }; -n188: token { $$ = 188; }; -n189: token { $$ = 189; }; -n190: token { $$ = 190; }; -n191: token { $$ = 191; }; -n192: token { $$ = 192; }; -n193: token { $$ = 193; }; -n194: token { $$ = 194; }; -n195: token { $$ = 195; }; -n196: token { $$ = 196; }; -n197: token { $$ = 197; }; -n198: token { $$ = 198; }; -n199: token { $$ = 199; }; -n200: token { $$ = 200; }; -n201: token { $$ = 201; }; -n202: token { $$ = 202; }; -n203: token { $$ = 203; }; -n204: token { $$ = 204; }; -n205: token { $$ = 205; }; -n206: token { $$ = 206; }; -n207: token { $$ = 207; }; -n208: token { $$ = 208; }; -n209: token { $$ = 209; }; -n210: token { $$ = 210; }; -n211: token { $$ = 211; }; -n212: token { $$ = 212; }; -n213: token { $$ = 213; }; -n214: token { $$ = 214; }; -n215: token { $$ = 215; }; -n216: token { $$ = 216; }; -n217: token { $$ = 217; }; -n218: token { $$ = 218; }; -n219: 
token { $$ = 219; }; -n220: token { $$ = 220; }; -n221: token { $$ = 221; }; -n222: token { $$ = 222; }; -n223: token { $$ = 223; }; -n224: token { $$ = 224; }; -n225: token { $$ = 225; }; -n226: token { $$ = 226; }; -n227: token { $$ = 227; }; -n228: token { $$ = 228; }; -n229: token { $$ = 229; }; -n230: token { $$ = 230; }; -n231: token { $$ = 231; }; -n232: token { $$ = 232; }; -n233: token { $$ = 233; }; -n234: token { $$ = 234; }; -n235: token { $$ = 235; }; -n236: token { $$ = 236; }; -n237: token { $$ = 237; }; -n238: token { $$ = 238; }; -n239: token { $$ = 239; }; -n240: token { $$ = 240; }; -n241: token { $$ = 241; }; -n242: token { $$ = 242; }; -n243: token { $$ = 243; }; -n244: token { $$ = 244; }; -n245: token { $$ = 245; }; -n246: token { $$ = 246; }; -n247: token { $$ = 247; }; -n248: token { $$ = 248; }; -n249: token { $$ = 249; }; -n250: token { $$ = 250; }; -n251: token { $$ = 251; }; -n252: token { $$ = 252; }; -n253: token { $$ = 253; }; -n254: token { $$ = 254; }; -n255: token { $$ = 255; }; -n256: token { $$ = 256; }; -n257: token { $$ = 257; }; -n258: token { $$ = 258; }; -n259: token { $$ = 259; }; -n260: token { $$ = 260; }; -n261: token { $$ = 261; }; -n262: token { $$ = 262; }; -n263: token { $$ = 263; }; -n264: token { $$ = 264; }; -n265: token { $$ = 265; }; -n266: token { $$ = 266; }; -n267: token { $$ = 267; }; -n268: token { $$ = 268; }; -n269: token { $$ = 269; }; -n270: token { $$ = 270; }; -n271: token { $$ = 271; }; -n272: token { $$ = 272; }; -n273: token { $$ = 273; }; -n274: token { $$ = 274; }; -n275: token { $$ = 275; }; -n276: token { $$ = 276; }; -n277: token { $$ = 277; }; -n278: token { $$ = 278; }; -n279: token { $$ = 279; }; -n280: token { $$ = 280; }; -n281: token { $$ = 281; }; -n282: token { $$ = 282; }; -n283: token { $$ = 283; }; -n284: token { $$ = 284; }; -n285: token { $$ = 285; }; -n286: token { $$ = 286; }; -n287: token { $$ = 287; }; -n288: token { $$ = 288; }; -n289: token { $$ = 289; }; -n290: token { $$ = 290; }; -n291: token { $$ = 291; }; -n292: token { $$ = 292; }; -n293: token { $$ = 293; }; -n294: token { $$ = 294; }; -n295: token { $$ = 295; }; -n296: token { $$ = 296; }; -n297: token { $$ = 297; }; -n298: token { $$ = 298; }; -n299: token { $$ = 299; }; -n300: token { $$ = 300; }; -n301: token { $$ = 301; }; -n302: token { $$ = 302; }; -n303: token { $$ = 303; }; -n304: token { $$ = 304; }; -n305: token { $$ = 305; }; -n306: token { $$ = 306; }; -n307: token { $$ = 307; }; -n308: token { $$ = 308; }; -n309: token { $$ = 309; }; -n310: token { $$ = 310; }; -n311: token { $$ = 311; }; -n312: token { $$ = 312; }; -n313: token { $$ = 313; }; -n314: token { $$ = 314; }; -n315: token { $$ = 315; }; -n316: token { $$ = 316; }; -n317: token { $$ = 317; }; -n318: token { $$ = 318; }; -n319: token { $$ = 319; }; -n320: token { $$ = 320; }; -n321: token { $$ = 321; }; -n322: token { $$ = 322; }; -n323: token { $$ = 323; }; -n324: token { $$ = 324; }; -n325: token { $$ = 325; }; -n326: token { $$ = 326; }; -n327: token { $$ = 327; }; -n328: token { $$ = 328; }; -n329: token { $$ = 329; }; -n330: token { $$ = 330; }; -n331: token { $$ = 331; }; -n332: token { $$ = 332; }; -n333: token { $$ = 333; }; -n334: token { $$ = 334; }; -n335: token { $$ = 335; }; -n336: token { $$ = 336; }; -n337: token { $$ = 337; }; -n338: token { $$ = 338; }; -n339: token { $$ = 339; }; -n340: token { $$ = 340; }; -n341: token { $$ = 341; }; -n342: token { $$ = 342; }; -n343: token { $$ = 343; }; -n344: token { $$ = 344; }; -n345: token { $$ = 345; }; 
-n346: token { $$ = 346; }; -n347: token { $$ = 347; }; -n348: token { $$ = 348; }; -n349: token { $$ = 349; }; -n350: token { $$ = 350; }; -n351: token { $$ = 351; }; -n352: token { $$ = 352; }; -n353: token { $$ = 353; }; -n354: token { $$ = 354; }; -n355: token { $$ = 355; }; -n356: token { $$ = 356; }; -n357: token { $$ = 357; }; -n358: token { $$ = 358; }; -n359: token { $$ = 359; }; -n360: token { $$ = 360; }; -n361: token { $$ = 361; }; -n362: token { $$ = 362; }; -n363: token { $$ = 363; }; -n364: token { $$ = 364; }; -n365: token { $$ = 365; }; -n366: token { $$ = 366; }; -n367: token { $$ = 367; }; -n368: token { $$ = 368; }; -n369: token { $$ = 369; }; -n370: token { $$ = 370; }; -n371: token { $$ = 371; }; -n372: token { $$ = 372; }; -n373: token { $$ = 373; }; -n374: token { $$ = 374; }; -n375: token { $$ = 375; }; -n376: token { $$ = 376; }; -n377: token { $$ = 377; }; -n378: token { $$ = 378; }; -n379: token { $$ = 379; }; -n380: token { $$ = 380; }; -n381: token { $$ = 381; }; -n382: token { $$ = 382; }; -n383: token { $$ = 383; }; -n384: token { $$ = 384; }; -n385: token { $$ = 385; }; -n386: token { $$ = 386; }; -n387: token { $$ = 387; }; -n388: token { $$ = 388; }; -n389: token { $$ = 389; }; -n390: token { $$ = 390; }; -n391: token { $$ = 391; }; -n392: token { $$ = 392; }; -n393: token { $$ = 393; }; -n394: token { $$ = 394; }; -n395: token { $$ = 395; }; -n396: token { $$ = 396; }; -n397: token { $$ = 397; }; -n398: token { $$ = 398; }; -n399: token { $$ = 399; }; -n400: token { $$ = 400; }; -n401: token { $$ = 401; }; -n402: token { $$ = 402; }; -n403: token { $$ = 403; }; -n404: token { $$ = 404; }; -n405: token { $$ = 405; }; -n406: token { $$ = 406; }; -n407: token { $$ = 407; }; -n408: token { $$ = 408; }; -n409: token { $$ = 409; }; -n410: token { $$ = 410; }; -n411: token { $$ = 411; }; -n412: token { $$ = 412; }; -n413: token { $$ = 413; }; -n414: token { $$ = 414; }; -n415: token { $$ = 415; }; -n416: token { $$ = 416; }; -n417: token { $$ = 417; }; -n418: token { $$ = 418; }; -n419: token { $$ = 419; }; -n420: token { $$ = 420; }; -n421: token { $$ = 421; }; -n422: token { $$ = 422; }; -n423: token { $$ = 423; }; -n424: token { $$ = 424; }; -n425: token { $$ = 425; }; -n426: token { $$ = 426; }; -n427: token { $$ = 427; }; -n428: token { $$ = 428; }; -n429: token { $$ = 429; }; -n430: token { $$ = 430; }; -n431: token { $$ = 431; }; -n432: token { $$ = 432; }; -n433: token { $$ = 433; }; -n434: token { $$ = 434; }; -n435: token { $$ = 435; }; -n436: token { $$ = 436; }; -n437: token { $$ = 437; }; -n438: token { $$ = 438; }; -n439: token { $$ = 439; }; -n440: token { $$ = 440; }; -n441: token { $$ = 441; }; -n442: token { $$ = 442; }; -n443: token { $$ = 443; }; -n444: token { $$ = 444; }; -n445: token { $$ = 445; }; -n446: token { $$ = 446; }; -n447: token { $$ = 447; }; -n448: token { $$ = 448; }; -n449: token { $$ = 449; }; -n450: token { $$ = 450; }; -n451: token { $$ = 451; }; -n452: token { $$ = 452; }; -n453: token { $$ = 453; }; -n454: token { $$ = 454; }; -n455: token { $$ = 455; }; -n456: token { $$ = 456; }; -n457: token { $$ = 457; }; -n458: token { $$ = 458; }; -n459: token { $$ = 459; }; -n460: token { $$ = 460; }; -n461: token { $$ = 461; }; -n462: token { $$ = 462; }; -n463: token { $$ = 463; }; -n464: token { $$ = 464; }; -n465: token { $$ = 465; }; -n466: token { $$ = 466; }; -n467: token { $$ = 467; }; -n468: token { $$ = 468; }; -n469: token { $$ = 469; }; -n470: token { $$ = 470; }; -n471: token { $$ = 471; }; -n472: token { $$ = 472; 
}; -n473: token { $$ = 473; }; -n474: token { $$ = 474; }; -n475: token { $$ = 475; }; -n476: token { $$ = 476; }; -n477: token { $$ = 477; }; -n478: token { $$ = 478; }; -n479: token { $$ = 479; }; -n480: token { $$ = 480; }; -n481: token { $$ = 481; }; -n482: token { $$ = 482; }; -n483: token { $$ = 483; }; -n484: token { $$ = 484; }; -n485: token { $$ = 485; }; -n486: token { $$ = 486; }; -n487: token { $$ = 487; }; -n488: token { $$ = 488; }; -n489: token { $$ = 489; }; -n490: token { $$ = 490; }; -n491: token { $$ = 491; }; -n492: token { $$ = 492; }; -n493: token { $$ = 493; }; -n494: token { $$ = 494; }; -n495: token { $$ = 495; }; -n496: token { $$ = 496; }; -n497: token { $$ = 497; }; -n498: token { $$ = 498; }; -n499: token { $$ = 499; }; -n500: token { $$ = 500; }; -n501: token { $$ = 501; }; -n502: token { $$ = 502; }; -n503: token { $$ = 503; }; -n504: token { $$ = 504; }; -n505: token { $$ = 505; }; -n506: token { $$ = 506; }; -n507: token { $$ = 507; }; -n508: token { $$ = 508; }; -n509: token { $$ = 509; }; -n510: token { $$ = 510; }; -n511: token { $$ = 511; }; -n512: token { $$ = 512; }; -n513: token { $$ = 513; }; -n514: token { $$ = 514; }; -n515: token { $$ = 515; }; -n516: token { $$ = 516; }; -n517: token { $$ = 517; }; -n518: token { $$ = 518; }; -n519: token { $$ = 519; }; -n520: token { $$ = 520; }; -n521: token { $$ = 521; }; -n522: token { $$ = 522; }; -n523: token { $$ = 523; }; -n524: token { $$ = 524; }; -n525: token { $$ = 525; }; -n526: token { $$ = 526; }; -n527: token { $$ = 527; }; -n528: token { $$ = 528; }; -n529: token { $$ = 529; }; -n530: token { $$ = 530; }; -n531: token { $$ = 531; }; -n532: token { $$ = 532; }; -n533: token { $$ = 533; }; -n534: token { $$ = 534; }; -n535: token { $$ = 535; }; -n536: token { $$ = 536; }; -n537: token { $$ = 537; }; -n538: token { $$ = 538; }; -n539: token { $$ = 539; }; -n540: token { $$ = 540; }; -n541: token { $$ = 541; }; -n542: token { $$ = 542; }; -n543: token { $$ = 543; }; -n544: token { $$ = 544; }; -n545: token { $$ = 545; }; -n546: token { $$ = 546; }; -n547: token { $$ = 547; }; -n548: token { $$ = 548; }; -n549: token { $$ = 549; }; -n550: token { $$ = 550; }; -n551: token { $$ = 551; }; -n552: token { $$ = 552; }; -n553: token { $$ = 553; }; -n554: token { $$ = 554; }; -n555: token { $$ = 555; }; -n556: token { $$ = 556; }; -n557: token { $$ = 557; }; -n558: token { $$ = 558; }; -n559: token { $$ = 559; }; -n560: token { $$ = 560; }; -n561: token { $$ = 561; }; -n562: token { $$ = 562; }; -n563: token { $$ = 563; }; -n564: token { $$ = 564; }; -n565: token { $$ = 565; }; -n566: token { $$ = 566; }; -n567: token { $$ = 567; }; -n568: token { $$ = 568; }; -n569: token { $$ = 569; }; -n570: token { $$ = 570; }; -n571: token { $$ = 571; }; -n572: token { $$ = 572; }; -n573: token { $$ = 573; }; -n574: token { $$ = 574; }; -n575: token { $$ = 575; }; -n576: token { $$ = 576; }; -n577: token { $$ = 577; }; -n578: token { $$ = 578; }; -n579: token { $$ = 579; }; -n580: token { $$ = 580; }; -n581: token { $$ = 581; }; -n582: token { $$ = 582; }; -n583: token { $$ = 583; }; -n584: token { $$ = 584; }; -n585: token { $$ = 585; }; -n586: token { $$ = 586; }; -n587: token { $$ = 587; }; -n588: token { $$ = 588; }; -n589: token { $$ = 589; }; -n590: token { $$ = 590; }; -n591: token { $$ = 591; }; -n592: token { $$ = 592; }; -n593: token { $$ = 593; }; -n594: token { $$ = 594; }; -n595: token { $$ = 595; }; -n596: token { $$ = 596; }; -n597: token { $$ = 597; }; -n598: token { $$ = 598; }; -n599: token { $$ = 
599; }; -n600: token { $$ = 600; }; -n601: token { $$ = 601; }; -n602: token { $$ = 602; }; -n603: token { $$ = 603; }; -n604: token { $$ = 604; }; -n605: token { $$ = 605; }; -n606: token { $$ = 606; }; -n607: token { $$ = 607; }; -n608: token { $$ = 608; }; -n609: token { $$ = 609; }; -n610: token { $$ = 610; }; -n611: token { $$ = 611; }; -n612: token { $$ = 612; }; -n613: token { $$ = 613; }; -n614: token { $$ = 614; }; -n615: token { $$ = 615; }; -n616: token { $$ = 616; }; -n617: token { $$ = 617; }; -n618: token { $$ = 618; }; -n619: token { $$ = 619; }; -n620: token { $$ = 620; }; -n621: token { $$ = 621; }; -n622: token { $$ = 622; }; -n623: token { $$ = 623; }; -n624: token { $$ = 624; }; -n625: token { $$ = 625; }; -n626: token { $$ = 626; }; -n627: token { $$ = 627; }; -n628: token { $$ = 628; }; -n629: token { $$ = 629; }; -n630: token { $$ = 630; }; -n631: token { $$ = 631; }; -n632: token { $$ = 632; }; -n633: token { $$ = 633; }; -n634: token { $$ = 634; }; -n635: token { $$ = 635; }; -n636: token { $$ = 636; }; -n637: token { $$ = 637; }; -n638: token { $$ = 638; }; -n639: token { $$ = 639; }; -n640: token { $$ = 640; }; -n641: token { $$ = 641; }; -n642: token { $$ = 642; }; -n643: token { $$ = 643; }; -n644: token { $$ = 644; }; -n645: token { $$ = 645; }; -n646: token { $$ = 646; }; -n647: token { $$ = 647; }; -n648: token { $$ = 648; }; -n649: token { $$ = 649; }; -n650: token { $$ = 650; }; -n651: token { $$ = 651; }; -n652: token { $$ = 652; }; -n653: token { $$ = 653; }; -n654: token { $$ = 654; }; -n655: token { $$ = 655; }; -n656: token { $$ = 656; }; -n657: token { $$ = 657; }; -n658: token { $$ = 658; }; -n659: token { $$ = 659; }; -n660: token { $$ = 660; }; -n661: token { $$ = 661; }; -n662: token { $$ = 662; }; -n663: token { $$ = 663; }; -n664: token { $$ = 664; }; -n665: token { $$ = 665; }; -n666: token { $$ = 666; }; -n667: token { $$ = 667; }; -n668: token { $$ = 668; }; -n669: token { $$ = 669; }; -n670: token { $$ = 670; }; -n671: token { $$ = 671; }; -n672: token { $$ = 672; }; -n673: token { $$ = 673; }; -n674: token { $$ = 674; }; -n675: token { $$ = 675; }; -n676: token { $$ = 676; }; -n677: token { $$ = 677; }; -n678: token { $$ = 678; }; -n679: token { $$ = 679; }; -n680: token { $$ = 680; }; -n681: token { $$ = 681; }; -n682: token { $$ = 682; }; -n683: token { $$ = 683; }; -n684: token { $$ = 684; }; -n685: token { $$ = 685; }; -n686: token { $$ = 686; }; -n687: token { $$ = 687; }; -n688: token { $$ = 688; }; -n689: token { $$ = 689; }; -n690: token { $$ = 690; }; -n691: token { $$ = 691; }; -n692: token { $$ = 692; }; -n693: token { $$ = 693; }; -n694: token { $$ = 694; }; -n695: token { $$ = 695; }; -n696: token { $$ = 696; }; -n697: token { $$ = 697; }; -n698: token { $$ = 698; }; -n699: token { $$ = 699; }; -n700: token { $$ = 700; }; -n701: token { $$ = 701; }; -n702: token { $$ = 702; }; -n703: token { $$ = 703; }; -n704: token { $$ = 704; }; -n705: token { $$ = 705; }; -n706: token { $$ = 706; }; -n707: token { $$ = 707; }; -n708: token { $$ = 708; }; -n709: token { $$ = 709; }; -n710: token { $$ = 710; }; -n711: token { $$ = 711; }; -n712: token { $$ = 712; }; -n713: token { $$ = 713; }; -n714: token { $$ = 714; }; -n715: token { $$ = 715; }; -n716: token { $$ = 716; }; -n717: token { $$ = 717; }; -n718: token { $$ = 718; }; -n719: token { $$ = 719; }; -n720: token { $$ = 720; }; -n721: token { $$ = 721; }; -n722: token { $$ = 722; }; -n723: token { $$ = 723; }; -n724: token { $$ = 724; }; -n725: token { $$ = 725; }; -n726: token { $$ 
= 726; }; -n727: token { $$ = 727; }; -n728: token { $$ = 728; }; -n729: token { $$ = 729; }; -n730: token { $$ = 730; }; -n731: token { $$ = 731; }; -n732: token { $$ = 732; }; -n733: token { $$ = 733; }; -n734: token { $$ = 734; }; -n735: token { $$ = 735; }; -n736: token { $$ = 736; }; -n737: token { $$ = 737; }; -n738: token { $$ = 738; }; -n739: token { $$ = 739; }; -n740: token { $$ = 740; }; -n741: token { $$ = 741; }; -n742: token { $$ = 742; }; -n743: token { $$ = 743; }; -n744: token { $$ = 744; }; -n745: token { $$ = 745; }; -n746: token { $$ = 746; }; -n747: token { $$ = 747; }; -n748: token { $$ = 748; }; -n749: token { $$ = 749; }; -n750: token { $$ = 750; }; -n751: token { $$ = 751; }; -n752: token { $$ = 752; }; -n753: token { $$ = 753; }; -n754: token { $$ = 754; }; -n755: token { $$ = 755; }; -n756: token { $$ = 756; }; -n757: token { $$ = 757; }; -n758: token { $$ = 758; }; -n759: token { $$ = 759; }; -n760: token { $$ = 760; }; -n761: token { $$ = 761; }; -n762: token { $$ = 762; }; -n763: token { $$ = 763; }; -n764: token { $$ = 764; }; -n765: token { $$ = 765; }; -n766: token { $$ = 766; }; -n767: token { $$ = 767; }; -n768: token { $$ = 768; }; -n769: token { $$ = 769; }; -n770: token { $$ = 770; }; -n771: token { $$ = 771; }; -n772: token { $$ = 772; }; -n773: token { $$ = 773; }; -n774: token { $$ = 774; }; -n775: token { $$ = 775; }; -n776: token { $$ = 776; }; -n777: token { $$ = 777; }; -n778: token { $$ = 778; }; -n779: token { $$ = 779; }; -n780: token { $$ = 780; }; -n781: token { $$ = 781; }; -n782: token { $$ = 782; }; -n783: token { $$ = 783; }; -n784: token { $$ = 784; }; -n785: token { $$ = 785; }; -n786: token { $$ = 786; }; -n787: token { $$ = 787; }; -n788: token { $$ = 788; }; -n789: token { $$ = 789; }; -n790: token { $$ = 790; }; -n791: token { $$ = 791; }; -n792: token { $$ = 792; }; -n793: token { $$ = 793; }; -n794: token { $$ = 794; }; -n795: token { $$ = 795; }; -n796: token { $$ = 796; }; -n797: token { $$ = 797; }; -n798: token { $$ = 798; }; -n799: token { $$ = 799; }; -n800: token { $$ = 800; }; -n801: token { $$ = 801; }; -n802: token { $$ = 802; }; -n803: token { $$ = 803; }; -n804: token { $$ = 804; }; -n805: token { $$ = 805; }; -n806: token { $$ = 806; }; -n807: token { $$ = 807; }; -n808: token { $$ = 808; }; -n809: token { $$ = 809; }; -n810: token { $$ = 810; }; -n811: token { $$ = 811; }; -n812: token { $$ = 812; }; -n813: token { $$ = 813; }; -n814: token { $$ = 814; }; -n815: token { $$ = 815; }; -n816: token { $$ = 816; }; -n817: token { $$ = 817; }; -n818: token { $$ = 818; }; -n819: token { $$ = 819; }; -n820: token { $$ = 820; }; -n821: token { $$ = 821; }; -n822: token { $$ = 822; }; -n823: token { $$ = 823; }; -n824: token { $$ = 824; }; -n825: token { $$ = 825; }; -n826: token { $$ = 826; }; -n827: token { $$ = 827; }; -n828: token { $$ = 828; }; -n829: token { $$ = 829; }; -n830: token { $$ = 830; }; -n831: token { $$ = 831; }; -n832: token { $$ = 832; }; -n833: token { $$ = 833; }; -n834: token { $$ = 834; }; -n835: token { $$ = 835; }; -n836: token { $$ = 836; }; -n837: token { $$ = 837; }; -n838: token { $$ = 838; }; -n839: token { $$ = 839; }; -n840: token { $$ = 840; }; -n841: token { $$ = 841; }; -n842: token { $$ = 842; }; -n843: token { $$ = 843; }; -n844: token { $$ = 844; }; -n845: token { $$ = 845; }; -n846: token { $$ = 846; }; -n847: token { $$ = 847; }; -n848: token { $$ = 848; }; -n849: token { $$ = 849; }; -n850: token { $$ = 850; }; -n851: token { $$ = 851; }; -n852: token { $$ = 852; }; -n853: token { 
$$ = 853; }; -n854: token { $$ = 854; }; -n855: token { $$ = 855; }; -n856: token { $$ = 856; }; -n857: token { $$ = 857; }; -n858: token { $$ = 858; }; -n859: token { $$ = 859; }; -n860: token { $$ = 860; }; -n861: token { $$ = 861; }; -n862: token { $$ = 862; }; -n863: token { $$ = 863; }; -n864: token { $$ = 864; }; -n865: token { $$ = 865; }; -n866: token { $$ = 866; }; -n867: token { $$ = 867; }; -n868: token { $$ = 868; }; -n869: token { $$ = 869; }; -n870: token { $$ = 870; }; -n871: token { $$ = 871; }; -n872: token { $$ = 872; }; -n873: token { $$ = 873; }; -n874: token { $$ = 874; }; -n875: token { $$ = 875; }; -n876: token { $$ = 876; }; -n877: token { $$ = 877; }; -n878: token { $$ = 878; }; -n879: token { $$ = 879; }; -n880: token { $$ = 880; }; -n881: token { $$ = 881; }; -n882: token { $$ = 882; }; -n883: token { $$ = 883; }; -n884: token { $$ = 884; }; -n885: token { $$ = 885; }; -n886: token { $$ = 886; }; -n887: token { $$ = 887; }; -n888: token { $$ = 888; }; -n889: token { $$ = 889; }; -n890: token { $$ = 890; }; -n891: token { $$ = 891; }; -n892: token { $$ = 892; }; -n893: token { $$ = 893; }; -n894: token { $$ = 894; }; -n895: token { $$ = 895; }; -n896: token { $$ = 896; }; -n897: token { $$ = 897; }; -n898: token { $$ = 898; }; -n899: token { $$ = 899; }; -n900: token { $$ = 900; }; -n901: token { $$ = 901; }; -n902: token { $$ = 902; }; -n903: token { $$ = 903; }; -n904: token { $$ = 904; }; -n905: token { $$ = 905; }; -n906: token { $$ = 906; }; -n907: token { $$ = 907; }; -n908: token { $$ = 908; }; -n909: token { $$ = 909; }; -n910: token { $$ = 910; }; -n911: token { $$ = 911; }; -n912: token { $$ = 912; }; -n913: token { $$ = 913; }; -n914: token { $$ = 914; }; -n915: token { $$ = 915; }; -n916: token { $$ = 916; }; -n917: token { $$ = 917; }; -n918: token { $$ = 918; }; -n919: token { $$ = 919; }; -n920: token { $$ = 920; }; -n921: token { $$ = 921; }; -n922: token { $$ = 922; }; -n923: token { $$ = 923; }; -n924: token { $$ = 924; }; -n925: token { $$ = 925; }; -n926: token { $$ = 926; }; -n927: token { $$ = 927; }; -n928: token { $$ = 928; }; -n929: token { $$ = 929; }; -n930: token { $$ = 930; }; -n931: token { $$ = 931; }; -n932: token { $$ = 932; }; -n933: token { $$ = 933; }; -n934: token { $$ = 934; }; -n935: token { $$ = 935; }; -n936: token { $$ = 936; }; -n937: token { $$ = 937; }; -n938: token { $$ = 938; }; -n939: token { $$ = 939; }; -n940: token { $$ = 940; }; -n941: token { $$ = 941; }; -n942: token { $$ = 942; }; -n943: token { $$ = 943; }; -n944: token { $$ = 944; }; -n945: token { $$ = 945; }; -n946: token { $$ = 946; }; -n947: token { $$ = 947; }; -n948: token { $$ = 948; }; -n949: token { $$ = 949; }; -n950: token { $$ = 950; }; -n951: token { $$ = 951; }; -n952: token { $$ = 952; }; -n953: token { $$ = 953; }; -n954: token { $$ = 954; }; -n955: token { $$ = 955; }; -n956: token { $$ = 956; }; -n957: token { $$ = 957; }; -n958: token { $$ = 958; }; -n959: token { $$ = 959; }; -n960: token { $$ = 960; }; -n961: token { $$ = 961; }; -n962: token { $$ = 962; }; -n963: token { $$ = 963; }; -n964: token { $$ = 964; }; -n965: token { $$ = 965; }; -n966: token { $$ = 966; }; -n967: token { $$ = 967; }; -n968: token { $$ = 968; }; -n969: token { $$ = 969; }; -n970: token { $$ = 970; }; -n971: token { $$ = 971; }; -n972: token { $$ = 972; }; -n973: token { $$ = 973; }; -n974: token { $$ = 974; }; -n975: token { $$ = 975; }; -n976: token { $$ = 976; }; -n977: token { $$ = 977; }; -n978: token { $$ = 978; }; -n979: token { $$ = 979; }; -n980: token 
{ $$ = 980; };
-n981: token { $$ = 981; };
-n982: token { $$ = 982; };
-n983: token { $$ = 983; };
-n984: token { $$ = 984; };
-n985: token { $$ = 985; };
-n986: token { $$ = 986; };
-n987: token { $$ = 987; };
-n988: token { $$ = 988; };
-n989: token { $$ = 989; };
-n990: token { $$ = 990; };
-n991: token { $$ = 991; };
-n992: token { $$ = 992; };
-n993: token { $$ = 993; };
-n994: token { $$ = 994; };
-n995: token { $$ = 995; };
-n996: token { $$ = 996; };
-n997: token { $$ = 997; };
-n998: token { $$ = 998; };
-n999: token { $$ = 999; };
-n1000: token { $$ = 1000; };
-%%
-
-
-
-
-/* A C error reporting function. */
-/* !POSIX */ static
-void yyerror (const char *msg)
-{
-  fprintf (stderr, "%s\n", msg);
-}
-static int
-yylex (void)
-{
-  static int return_token = 1;
-  static int counter = 1;
-  if (counter > MAX)
-    {
-      assert (counter++ == MAX + 1);
-      return 0;
-    }
-  if (return_token)
-    {
-      return_token = 0;
-      return token;
-    }
-  return_token = 1;
-  return counter++;
-}
-
-#include <stdlib.h> /* getenv. */
-#include <string.h> /* strcmp. */
-int
-main (int argc, char const* argv[])
-{
-  (void) argc;
-  (void) argv;
-  return yyparse ();
-}
-./calc.at:1489: cat stderr
-./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS
-input:
-./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y
- | (#) + (#) = 2222
-./calc.at:1482: $PREPARSER ./calc input
-input:
- | (1 + # + 1) = 1111
-./calc.at:1489: $PREPARSER ./calc input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... 
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1482: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -563. calc.at:1489: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - -stdout: -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. 
*/ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#include -#define MAX 200 -static int yylex (void); -#include - -/* !POSIX */ static void yyerror (const char *msg); -%} -%union -{ - int val; -}; - -%token END "end" -%type exp input -%token t1 1 "1" -%token t2 2 "2" -%token t3 3 "3" -%token t4 4 "4" -%token t5 5 "5" -%token t6 6 "6" -%token t7 7 "7" -%token t8 8 "8" -%token t9 9 "9" -%token t10 10 "10" -%token t11 11 "11" -%token t12 12 "12" -%token t13 13 "13" -%token t14 14 "14" -%token t15 15 "15" -%token t16 16 "16" -%token t17 17 "17" -%token t18 18 "18" -%token t19 19 "19" -%token t20 20 "20" -%token t21 21 "21" -%token t22 22 "22" -%token t23 23 "23" -%token t24 24 "24" -%token t25 25 "25" -%token t26 26 "26" -%token t27 27 "27" -%token t28 28 "28" -%token t29 29 "29" -%token t30 30 "30" -%token t31 31 "31" -%token t32 32 "32" -%token t33 33 "33" -%token t34 34 "34" -%token t35 35 "35" -%token t36 36 "36" -%token t37 37 "37" -%token t38 38 "38" -%token t39 39 "39" -%token t40 40 "40" -%token t41 41 "41" -%token t42 42 "42" -%token t43 43 "43" -%token t44 44 "44" -%token t45 45 "45" -%token t46 46 "46" -%token t47 47 "47" -%token t48 48 "48" -%token t49 49 "49" -%token t50 50 "50" -%token t51 51 "51" -%token t52 52 "52" -%token t53 53 "53" -%token t54 54 "54" -%token t55 55 "55" -%token t56 56 "56" -%token t57 57 "57" -%token t58 58 "58" -%token t59 59 "59" -%token t60 60 "60" -%token t61 61 "61" -%token t62 62 "62" -%token t63 63 "63" -%token t64 64 "64" -%token t65 65 "65" -%token t66 66 "66" -%token t67 67 "67" -%token t68 68 "68" -%token t69 69 "69" -%token t70 70 "70" -%token t71 71 "71" -%token t72 72 "72" -%token t73 73 "73" -%token t74 74 "74" -%token t75 75 "75" -%token t76 76 "76" -%token t77 77 "77" -%token t78 78 "78" -%token t79 79 "79" -%token t80 80 "80" -%token t81 81 "81" -%token t82 82 "82" -%token t83 83 "83" -%token t84 84 "84" -%token t85 85 "85" -%token t86 86 "86" -%token t87 87 "87" -%token t88 88 "88" -%token t89 89 "89" -%token t90 90 "90" -%token t91 91 "91" -%token t92 92 "92" -%token t93 93 "93" -%token t94 94 "94" -%token t95 95 "95" -%token t96 96 "96" -%token t97 97 "97" -%token t98 98 "98" -%token t99 99 "99" -%token t100 100 "100" -%token t101 101 "101" -%token t102 102 "102" -%token t103 103 "103" -%token t104 104 "104" -%token t105 105 "105" -%token t106 106 "106" -%token t107 107 "107" -%token t108 108 "108" -%token t109 109 "109" -%token t110 110 "110" -%token t111 111 "111" -%token t112 112 "112" -%token t113 113 "113" -%token t114 114 "114" -%token t115 115 "115" -%token t116 116 "116" -%token t117 117 "117" -%token t118 118 "118" -%token t119 119 "119" -%token t120 120 "120" -%token t121 121 "121" -%token t122 122 "122" -%token t123 123 "123" -%token t124 124 "124" -%token t125 125 "125" -%token t126 126 "126" -%token t127 127 "127" -%token t128 128 "128" -%token t129 129 "129" -%token t130 130 "130" -%token t131 131 "131" -%token t132 132 "132" -%token t133 133 "133" -%token t134 134 "134" -%token t135 135 "135" -%token t136 136 "136" -%token t137 137 "137" -%token t138 138 "138" -%token t139 139 "139" -%token t140 140 "140" -%token t141 141 "141" -%token t142 142 "142" -%token t143 143 "143" -%token t144 144 "144" -%token t145 145 "145" -%token t146 146 "146" -%token t147 147 "147" -%token t148 148 "148" -%token t149 149 "149" -%token t150 150 "150" -%token t151 151 "151" -%token t152 152 "152" -%token t153 153 "153" -%token t154 154 "154" -%token t155 155 "155" -%token t156 156 "156" -%token t157 157 "157" 
-%token t158 158 "158" -%token t159 159 "159" -%token t160 160 "160" -%token t161 161 "161" -%token t162 162 "162" -%token t163 163 "163" -%token t164 164 "164" -%token t165 165 "165" -%token t166 166 "166" -%token t167 167 "167" -%token t168 168 "168" -%token t169 169 "169" -%token t170 170 "170" -%token t171 171 "171" -%token t172 172 "172" -%token t173 173 "173" -%token t174 174 "174" -%token t175 175 "175" -%token t176 176 "176" -%token t177 177 "177" -%token t178 178 "178" -%token t179 179 "179" -%token t180 180 "180" -%token t181 181 "181" -%token t182 182 "182" -%token t183 183 "183" -%token t184 184 "184" -%token t185 185 "185" -%token t186 186 "186" -%token t187 187 "187" -%token t188 188 "188" -%token t189 189 "189" -%token t190 190 "190" -%token t191 191 "191" -%token t192 192 "192" -%token t193 193 "193" -%token t194 194 "194" -%token t195 195 "195" -%token t196 196 "196" -%token t197 197 "197" -%token t198 198 "198" -%token t199 199 "199" -%token t200 200 "200" -%% -input: - exp { assert ($1 == 0); $$ = $1; } -| input exp { assert ($2 == $1 + 1); $$ = $2; } -; - -exp: - END - { $$ = 0; } -| "1" END - { $$ = 1; } -| "1" "2" END - { $$ = 2; } -| "1" "2" "3" END - { $$ = 3; } -| "1" "2" "3" "4" END - { $$ = 4; } -| "1" "2" "3" "4" "5" END - { $$ = 5; } -| "1" "2" "3" "4" "5" "6" END - { $$ = 6; } -| "1" "2" "3" "4" "5" "6" "7" END - { $$ = 7; } -| "1" "2" "3" "4" "5" "6" "7" "8" END - { $$ = 8; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" END - { $$ = 9; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END - { $$ = 10; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END - { $$ = 11; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END - { $$ = 12; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END - { $$ = 13; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END - { $$ = 14; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END - { $$ = 15; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - END - { $$ = 16; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" END - { $$ = 17; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" END - { $$ = 18; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" END - { $$ = 19; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" END - { $$ = 20; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" END - { $$ = 21; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" END - { $$ = 22; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" END - { $$ = 23; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" END - { $$ = 24; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" END - { $$ = 25; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END - { $$ = 26; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END - { $$ = 27; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END - { 
$$ = 28; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END - { $$ = 29; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - END - { $$ = 30; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" END - { $$ = 31; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" END - { $$ = 32; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" END - { $$ = 33; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" END - { $$ = 34; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" END - { $$ = 35; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" END - { $$ = 36; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" END - { $$ = 37; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" END - { $$ = 38; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" END - { $$ = 39; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END - { $$ = 40; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END - { $$ = 41; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END - { $$ = 42; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END - { $$ = 43; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - END - { $$ = 44; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" END - { $$ = 45; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" END - { $$ = 46; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" END - { $$ = 47; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" END - { $$ = 48; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" END - { $$ = 49; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" END - { $$ = 50; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" END - { $$ = 51; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" END - { $$ = 52; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" END - { $$ = 53; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END - { $$ = 54; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END - { $$ = 55; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END - { $$ = 56; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END - { $$ = 57; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - END - { $$ = 58; } -| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" END - { $$ = 59; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" END - { $$ = 60; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" END - { $$ = 61; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" END - { $$ = 62; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" END - { $$ = 63; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" END - { $$ = 64; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" END - { $$ = 65; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" END - { $$ = 66; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" END - { $$ = 67; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END - { $$ = 68; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END - { $$ = 69; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END - { $$ = 70; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END - { $$ = 71; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - END - { $$ = 72; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" END - { $$ = 73; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" END - { $$ = 74; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" END - { $$ = 75; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" END - { $$ = 76; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" END - { $$ = 77; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" END - { $$ = 78; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" END - { $$ = 79; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" END - { $$ = 80; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" END - { $$ = 81; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END - { $$ = 82; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END - { $$ = 83; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END - { $$ = 84; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END - { $$ = 85; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - END - { $$ = 86; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" END - { $$ = 87; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" END - { $$ = 88; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" END - { $$ = 89; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" END - { $$ = 90; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" END - { $$ = 91; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" END - { $$ = 92; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" END - { $$ = 93; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" END - { $$ = 94; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" END - { $$ = 95; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END - { $$ = 96; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END - { $$ = 97; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END - { $$ = 98; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END - { $$ = 99; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - END - { $$ = 100; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" END - { $$ = 101; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" END - { $$ = 102; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" END - { $$ = 103; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" END - { $$ = 104; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" END - { $$ = 105; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" END - { $$ = 106; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" END - { $$ = 107; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" END - { $$ = 108; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" END - { $$ = 109; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END - { $$ = 110; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END - { $$ = 111; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - END - { $$ = 112; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" END - { $$ = 113; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" END - { $$ = 114; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" END - { $$ = 115; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" END - { $$ = 116; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" END - { $$ = 117; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" END - { $$ = 118; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" END - { $$ = 119; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" END - { $$ = 120; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" END - { $$ = 121; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END - { $$ = 122; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END - { $$ = 123; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - END - { $$ = 124; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" END - { $$ = 125; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" END - { $$ = 126; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" END - { $$ = 127; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" END - { $$ = 128; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" END - { $$ = 129; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" END - { $$ = 130; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" END - { $$ = 131; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" END - { $$ = 132; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" END - { $$ = 133; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END - { $$ = 134; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END - { $$ = 135; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - END - { $$ = 136; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" END - { $$ = 137; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" END - { $$ = 138; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" END - { $$ = 139; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" END - { $$ = 140; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" END - { $$ = 141; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" END - { $$ = 142; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" END - { $$ = 143; } -| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" END - { $$ = 144; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" END - { $$ = 145; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END - { $$ = 146; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END - { $$ = 147; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - END - { $$ = 148; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" END - { $$ = 149; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" END - { $$ = 150; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" END - { $$ = 151; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" END - { $$ = 152; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" END - { $$ = 153; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" END - { $$ = 154; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" END - { $$ = 155; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" END - { $$ = 156; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" END - { $$ = 157; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END - { $$ = 158; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END - { $$ = 159; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - END - { $$ = 160; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" END - { $$ = 161; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" END - { $$ = 162; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" END - { $$ = 163; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" END - { $$ = 164; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" END - { $$ = 165; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" END - { $$ = 166; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" END - { $$ = 167; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
- "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" END - { $$ = 168; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" END - { $$ = 169; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END - { $$ = 170; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END - { $$ = 171; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - END - { $$ = 172; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" END - { $$ = 173; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" END - { $$ = 174; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" END - { $$ = 175; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" END - { $$ = 176; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" END - { $$ = 177; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" END - { $$ = 178; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" END - { $$ = 179; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" END - { $$ = 180; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" END - { $$ = 181; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END - { $$ = 182; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END - { $$ = 183; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - END - { $$ = 184; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" END - { $$ = 185; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" END - { $$ = 186; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" END - { $$ = 187; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" END - { $$ = 188; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" END - { $$ = 189; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" END - { $$ = 190; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" END - { $$ = 191; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" END - { $$ = 192; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" END - { $$ = 193; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END - { $$ = 194; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END - { $$ = 195; } -| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - END - { $$ = 196; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" END - { $$ = 197; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" END - { $$ = 198; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" END - { $$ = 199; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" END - { $$ = 200; } -; -%% - - - - -/* A C error reporting function. */ -/* !POSIX */ static -void yyerror (const char *msg) -{ - fprintf (stderr, "%s\n", msg); -} -static int -yylex (void) -{ - static int inner = 1; - static int outer = 0; - if (outer > MAX) - return 0; - else if (inner > outer) - { - inner = 1; - ++outer; - return END; - } - return inner++; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} -./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -stdout: -./calc.at:1485: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1482: cat stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1485: $PREPARSER ./calc input -stderr: -557. calc.at:1482: ok -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 
2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 
93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token 
'=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () 
- $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering 
state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () 
-Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = 
nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - 
$3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) 
-Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - 
$1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: - | 1 2 - -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | 1//2 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1485: cat stderr -./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -stderr: -./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1485: $PREPARSER ./calc input -stderr: -./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -stderr: -memory exhausted -memory exhausted -./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -memory exhausted -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -./calc.at:1485: cat stderr -stderr: -input: - | 1 = 2 = 3 -stdout: -./calc.at:1485: $PREPARSER ./calc input -./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: -stderr: -./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 - | 1 2 -./calc.at:1479: $PREPARSER ./calc input -stderr: -stderr: -memory exhausted -memory exhausted -./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -memory exhausted -memory exhausted -syntax error, unexpected number -./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1485: cat stderr -./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1479: cat stderr -input: - | 1//2 -stderr: -./calc.at:1479: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1479: cat stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1485: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | error -./calc.at:1479: $PREPARSER ./calc input -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -stderr: -syntax error, unexpected invalid token -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -stderr: -syntax error, unexpected invalid token -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1485: cat stderr -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./calc.at:1479: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -./calc.at:1485: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '=' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -input: -input: - | (!!) + (1 2) = 1 - | - | +1 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -syntax error, unexpected '+' -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -./calc.at:1479: $PREPARSER ./calc /dev/null -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -input: - | (- *) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error, unexpected end of input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error, unexpected end of input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -input: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1479: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -input: -input: - | (- *) + (1 2) = 1 - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1479: cat stderr -input: - | (* *) + (*) + (*) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: cat stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
- | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1479: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: cat stderr -input: - | 1 + 2 * 3 + !- ++ -input: - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1479: cat stderr -stdout: -./calc.at:1485: cat stderr -./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -input: -stderr: -./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1479: $PREPARSER ./calc input -stderr: - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -stderr: -memory exhausted -memory exhausted -./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -memory exhausted -614. torture.at:485: stderr: -stderr: - ok -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stdout: -./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -stderr: -./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 - -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -stderr: -./calc.at:1479: cat stderr -./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./calc.at:1485: $PREPARSER ./calc input -stderr: -memory exhausted -memory exhausted -input: -./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -memory exhausted -memory exhausted -stderr: -stderr: -615. torture.at:531: ok -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () - -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -input: -input: - | (# + 1) = 1111 - | (1 + 1) / (1 - 1) -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1479: $PREPARSER ./calc input -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -syntax error: invalid character: '#' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1479: cat stderr -559. calc.at:1485: ok -input: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... -stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -error: null divisor -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1479: cat stderr -stdout: -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -553. 
calc.at:1479: ok - -input: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1486: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token 
number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 
= nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing 
stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 
28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 
= nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' 
() - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () 
-Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering 
state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number 
(2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token 
-Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -625. regression.at:25: testing Trivial grammars ... -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | error -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | - | +1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1486: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm 
line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -stdout: -./calc.at:1487: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1487: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1486: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next 
token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = 
token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n'./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) 
-Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 
-Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> 
$$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = 
nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm 
exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a 
token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a 
token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> 
$$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by 
rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -input: - | 1 2 - | 1 + 2 * 3 + !- ++ -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1487: $PREPARSER ./calc input -stderr: -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -./calc.at:1486: cat stderr -input: -input: - | (#) + (#) = 2222 -./calc.at:1486: $PREPARSER ./calc input - | 1//2 -./calc.at:1487: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -stdout: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1487: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -./calc.at:1486: cat stderr - | error -./calc.at:1487: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -input: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -stderr: -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token 
number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing 
stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ 
= nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a 
token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) 
-Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' 
(10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) 
-Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token 
is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" 
(4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering 
state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting 
token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token 
"number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting 
token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1487: $PREPARSER ./calc input -input: - | 1 2 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1486: cat stderr -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -input: - | (# + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp 
(1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -./calc.at:1487: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | - | +1 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -input: -./calc.at:1486: cat stderr - | 1//2 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: - | (1 + # + 1) = 1111 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1487: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | error -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./calc.at:1486: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1487: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 
-Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 
25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' 
() - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 = 2 = 3 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: - | (!!) + (1 2) = 1 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1486: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -561. calc.at:1486: ok -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1491: cat stderr -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless 
associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use 
%precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -input: - | - | +1 -./calc.at:1491: $PREPARSER ./calc input -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./calc.at:1487: cat stderr - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -input: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1487: $PREPARSER ./calc input -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error on 
token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1491: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1487: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | (* *) + (*) + (*) -./calc.at:1487: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1491: cat stderr -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1487: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 + !+ ++ -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1487: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering 
state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' 
(3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): 
- $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering 
state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token 
is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) 
-Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp 
(13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -626. regression.at:55: testing YYSTYPE typedef ... 
-stderr: -./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) 
-Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 
-Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' 
(5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input 
(1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 
79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 
8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 
-Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: cat stderr -input: - | 1 2 -./calc.at:1492: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1487: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -625. regression.at:25: ok -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1487: cat stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr - -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: - | (#) + (#) = 2222 -./calc.at:1487: $PREPARSER ./calc input -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use 
%precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] 
-input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -input: - | 1//2 -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = 
nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 
= token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stdout: -stderr: -./existing.at:74: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error, unexpected '*', expecting NEWLINE or '{' or ';' -./calc.at:1487: cat stderr -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -616. existing.at:74: ok -./calc.at:1491: cat stderr -./calc.at:1492: cat stderr -input: -input: - | (1 + #) = 1111 - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1487: $PREPARSER ./calc input - -input: - | error -stderr: -./calc.at:1492: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -627. regression.at:85: testing Early token definitions with --yacc ... -./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -stderr: -./existing.at:74: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -617. existing.at:74: ok -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1487: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1492: cat stderr -./calc.at:1487: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 + !- ++ -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1487: cat stderr -stderr: -./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1487: $PREPARSER ./calc input -stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: - | - | +1 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -626. 
regression.at:55: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token 
'=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -628. regression.at:127: testing Early token definitions without --yacc ... -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1491: cat stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -./calc.at:1492: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1491: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc /dev/null -./calc.at:1487: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1491: cat stderr -./calc.at:1487: cat stderr -input: -./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1492: $PREPARSER ./calc input -input: - | (1 + #) = 1111 -562. calc.at:1487: ok -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 
(line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -629. regression.at:173: testing Braces parsing ... -./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1491: cat stderr -input: -input: - | (# + 1) = 1111 - | (!!) 
+ (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -630. regression.at:196: testing Rule Line Numbers ... -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c -stdout: - { tests = {{{{{{{{{{}}}}}}}}}}; } -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -629. 
regression.at:173: stderr: - ok -./calc.at:1492: cat stderr -stdout: -input: - | (1 + # + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input -627. regression.at:85: ok -input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1492: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -631. regression.at:345: testing Mixing %token styles ... -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./regression.at:235: cat input.output -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 
(line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -630. regression.at:196: ok - | (1 + 1) / (1 - 1) -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 
-Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1492: $PREPARSER ./calc input ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -224189,294 +218295,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1491: cat stderr stderr: -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror -stdout: -./calc.at:1494: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -565. calc.at:1491: ok -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -628. regression.at:127: ok -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - -./calc.at:1492: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1492: $PREPARSER ./calc input -633. regression.at:438: testing Token definitions: parse.error=verbose ... 
-stderr: -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -stderr: Starting parse Entering state 0 Reading a token @@ -224776,72 +218596,7 @@ Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -225378,14 +219133,98 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -632. regression.at:437: testing Token definitions: parse.error=detailed ... 
-./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -226219,98 +220058,98 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +592. calc.at:1547: testing Calculator Java parse.error=verbose ... +input: +./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | 1 2 +590. 
calc.at:1545: ./calc.at:1492: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1 2 -stderr: -input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] -input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] -./calc.at:1494: $PREPARSER ./calc input -input: -./regression.at:357: sed 's,.*/$,,' stderr 1>&2 -stderr: -634. regression.at:447: testing Characters Escapes ... -./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: - | 1 + 2 * 3 + !- ++ +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + skipped (calc.at:1545) stderr: -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - Starting parse Entering state 0 Reading a token @@ -226326,10 +220165,21 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error -stderr: -stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +591. calc.at:1546: stderr: + skipped (calc.at:1546) +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -226345,87 +220195,141 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) +input: + +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: cat stderr +stderr: +input: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1489: $PREPARSER ./calc input + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1//2 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -226438,61 +220342,156 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: "$PERL" -pi -e 'use strict; + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... +./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -226502,7 +220501,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -226513,12 +220511,87 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1491: cat stderr +./calc.at:1492: cat stderr +592. calc.at:1547: input: +565. 
calc.at:1491: ok + | error +./calc.at:1492: $PREPARSER ./calc input + skipped (calc.at:1547) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... +./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... +./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... +./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... +./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +593. calc.at:1548: skipped (calc.at:1548) + +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +594. calc.at:1549: skipped (calc.at:1549) input: - | 1//2 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1494: $PREPARSER ./calc input + +598. calc.at:1554: testing Calculator Java api.push-pull=both ... +./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y ./calc.at:1492: cat stderr -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -stderr: +595. calc.at:1550: 596. calc.at:1551: skipped (calc.at:1551) + skipped (calc.at:1550) + + stderr: Starting parse Entering state 0 @@ -226531,1811 +220604,1809 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (2.1: 1) +-> $$ = nterm exp 
(2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (2.1-15: -5) + 
$2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm 
exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token 
is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token 
is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () +Next token is token '-' (9.7: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (9.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token 
'-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp 
(-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (10.13: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (12.5: 3) +Shifting token 
"number" (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () +Next 
token is token ')' (13.5: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (13.9: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1492: $PREPARSER ./calc input -stderr: +597. 
calc.at:1552: skipped (calc.at:1552) stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) 
+ $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> 
$$ = nterm line () + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' 
(4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is 
token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering 
state 27 -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () +Next token is token '-' (9.7: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (9.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () 
+Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 
(line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () +Next token is token '=' (10.13: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next 
token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering 
state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () +Next token is token ')' (13.5: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () +Next token is token '=' (13.9: ) Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + +input: + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... +./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y input: -635. regression.at:480: testing Web2c Report ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none - | 1 2 -./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y -./calc.at:1489: $PREPARSER ./calc input stderr: -636. regression.at:661: testing Web2c Actions ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y + | 1 2 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is 
token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +598. calc.at:1554: ./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 + skipped (calc.at:1554) +./calc.at:1492: $PREPARSER ./calc input +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -228343,14 +222414,43 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) + +602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: +601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... +600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... 
+ | 1//2 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228360,142 +222460,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +599. calc.at:1555: ./calc.at:1492: cat stderr + skipped (calc.at:1555) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc /dev/null stderr: stderr: -./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none -input: -./calc.at:1492: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) + +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228505,9 +222523,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +input: | error ./calc.at:1494: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228517,16 +222552,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1492: cat stderr ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr +./calc.at:1492: cat stderr stderr: Starting parse Entering state 0 @@ -228535,8 +222562,8 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) input: - | 1//2 -./calc.at:1489: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -228547,98 +222574,243 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: +602. calc.at:1560: stderr: +./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1494: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -228651,106 +222823,247 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1560) +600. calc.at:1556: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +601. calc.at:1557: 603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... + skipped (calc.at:1556) + skipped (calc.at:1557) stderr: - | 1 = 2 = 3 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -stderr: -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Starting parse -Entering state 0 +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -228763,11 +223076,13 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: + + | 1 = 2 = 3 ./calc.at:1494: $PREPARSER ./calc input -./regression.at:506: cat input.output -./regression.at:438: sed 's,.*/$,,' stderr 1>&2 -./regression.at:679: cat tables.c -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none + +stderr: ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -228778,7 +223093,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -228808,37 +223122,10 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -635. 
regression.at:480: ok -636. regression.at:661: ok -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -stderr: -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./calc.at:1492: cat stderr + ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -input: -./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1492: cat stderr stderr: - | (# + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -228868,9 +223155,9 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) - -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -stderr: +input: + | (!!) + (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -228881,7 +223168,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +stderr: +./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token @@ -228889,56 +223177,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -228951,11 +223281,9 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1489: $PREPARSER ./calc input 
-stderr: +604. torture.at:132: testing Big triangle ... +./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: stderr: Starting parse Entering state 0 @@ -228964,56 +223292,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + 
$3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229026,29 +223396,10 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1494: cat stderr -./torture.at:238: $PREPARSER ./input -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () | | +1 +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -229059,10 +223410,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1492: cat stderr stderr: -605. torture.at:216: ok Starting parse Entering state 0 Reading a token @@ -229083,19 +223431,7 @@ Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1492: cat stderr stderr: Starting parse Entering state 0 @@ -229116,8 +223452,19 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) - -./calc.at:1489: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -229126,70 +223473,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token ')' (1.11: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 
4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229202,22 +223582,9 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input -631. regression.at:345: ok -./calc.at:1494: cat stderr +605. torture.at:216: testing Big horizontal ... +./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 stderr: Starting parse Entering state 0 @@ -229226,70 +223593,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token ')' (1.11: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) 
- $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229302,112 +223702,169 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./calc.at:1494: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +603. calc.at:1561: skipped (calc.at:1561) +607. torture.at:271: testing State number type: 129 states ... 
+./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 ./calc.at:1494: $PREPARSER ./calc /dev/null +./calc.at:1492: cat stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./torture.at:141: $PREPARSER ./input -stderr: +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +606. torture.at:270: testing State number type: 128 states ... +./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +input: stderr: + | (* *) + (*) + (*) +./calc.at:1492: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2024-12-29 02:12:50.738523871 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2024-12-29 02:12:50.738523871 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1492: cat stderr -604. torture.at:132: ok -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +606. torture.at:270: 607. torture.at:271: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (torture.at:270) ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -229418,20 +223875,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -input: -stderr: -stdout: -634. regression.at:447: ok - | (1 + 1) / (1 - 1) -./calc.at:1489: cat stderr -./calc.at:1494: cat stderr -./calc.at:1492: $PREPARSER ./calc input -input: -input: - | - | +1 -./calc.at:1489: $PREPARSER ./calc input + skipped (torture.at:271) stderr: Starting parse Entering state 0 @@ -229440,101 +223884,101 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 +Entering state 11 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -229548,33 +223992,27 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: cat stderr + + +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1492: cat stderr ./calc.at:1494: $PREPARSER ./calc input +input: stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - - + | 1 + 2 * 3 + !+ ++ +./calc.at:1492: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -229582,102 +224020,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 
= token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229691,29 +224261,74 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -637. regression.at:812: testing Useless Tokens ... 
-./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -638. regression.at:1143: testing Dancer ... +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -229895,60 +224510,2424 @@ Shifting token '*' (1.39: ) Entering state 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1492: $PREPARSER ./calc input +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +stderr: +608. torture.at:272: testing State number type: 256 states ... +./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' 
(1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2024-12-29 02:12:50.862518782 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found +input: +608. torture.at:272: | (#) + (#) = 2222 + skipped (torture.at:272) +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +610. torture.at:274: testing State number type: 32768 states ... +./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 +stdout: +stderr: +609. torture.at:273: testing State number type: 257 states ... +./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. 
*/ +#include +} + +%define parse.error verbose +%debug +%{ +#include +#include +#define MAX 1000 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} + +%token + t1 1 "1" + t2 2 "2" + t3 3 "3" + t4 4 "4" + t5 5 "5" + t6 6 "6" + t7 7 "7" + t8 8 "8" + t9 9 "9" + t10 10 "10" + t11 11 "11" + t12 12 "12" + t13 13 "13" + t14 14 "14" + t15 15 "15" + t16 16 "16" + t17 17 "17" + t18 18 "18" + t19 19 "19" + t20 20 "20" + t21 21 "21" + t22 22 "22" + t23 23 "23" + t24 24 "24" + t25 25 "25" + t26 26 "26" + t27 27 "27" + t28 28 "28" + t29 29 "29" + t30 30 "30" + t31 31 "31" + t32 32 "32" + t33 33 "33" + t34 34 "34" + t35 35 "35" + t36 36 "36" + t37 37 "37" + t38 38 "38" + t39 39 "39" + t40 40 "40" + t41 41 "41" + t42 42 "42" + t43 43 "43" + t44 44 "44" + t45 45 "45" + t46 46 "46" + t47 47 "47" + t48 48 "48" + t49 49 "49" + t50 50 "50" + t51 51 "51" + t52 52 "52" + t53 53 "53" + t54 54 "54" + t55 55 "55" + t56 56 "56" + t57 57 "57" + t58 58 "58" + t59 59 "59" + t60 60 "60" + t61 61 "61" + t62 62 "62" + t63 63 "63" + t64 64 "64" + t65 65 "65" + t66 66 "66" + t67 67 "67" + t68 68 "68" + t69 69 "69" + t70 70 "70" + t71 71 "71" + t72 72 "72" + t73 73 "73" + t74 74 "74" + t75 75 "75" + t76 76 "76" + t77 77 "77" + t78 78 "78" + t79 79 "79" + t80 80 "80" + t81 81 "81" + t82 82 "82" + t83 83 "83" + t84 84 "84" + t85 85 "85" + t86 86 "86" + t87 87 "87" + t88 88 "88" + t89 89 "89" + t90 90 "90" + t91 91 "91" + t92 92 "92" + t93 93 "93" + t94 94 "94" + t95 95 "95" + t96 96 "96" + t97 97 "97" + t98 98 "98" + t99 99 "99" + t100 100 "100" + t101 101 "101" + t102 102 "102" + t103 103 "103" + t104 104 "104" + t105 105 "105" + t106 106 "106" + t107 107 "107" + t108 108 "108" + t109 109 "109" + t110 110 "110" + t111 111 "111" + t112 112 "112" + t113 113 "113" + t114 114 "114" + t115 115 "115" + t116 116 "116" + t117 117 "117" + t118 118 "118" + t119 119 "119" + t120 120 "120" + t121 121 "121" + t122 122 "122" + t123 123 "123" + t124 124 "124" + t125 125 "125" + t126 126 "126" + t127 127 "127" + t128 128 "128" + t129 129 "129" + t130 130 "130" + t131 131 "131" + t132 132 "132" + t133 133 "133" + t134 134 "134" + t135 135 "135" + t136 136 "136" + t137 137 "137" + t138 138 "138" + t139 139 "139" + t140 140 "140" + t141 141 "141" + t142 142 "142" + t143 143 "143" + t144 144 "144" + t145 145 "145" + t146 146 "146" + t147 147 "147" + t148 148 "148" + t149 149 "149" + t150 150 "150" + t151 151 "151" + t152 152 "152" + t153 153 "153" + t154 154 "154" + t155 155 "155" + t156 156 "156" + t157 157 "157" + t158 158 "158" + t159 159 "159" + t160 160 "160" + t161 161 "161" + t162 162 "162" + t163 163 "163" + t164 164 "164" + t165 165 "165" + t166 166 "166" + t167 167 "167" + t168 168 "168" + t169 169 "169" + t170 170 "170" + t171 171 "171" + t172 172 "172" + t173 173 "173" + t174 174 "174" + t175 175 "175" + t176 176 "176" + t177 177 "177" + t178 178 "178" + t179 179 "179" + t180 180 "180" + t181 181 "181" + t182 182 "182" + t183 183 "183" + t184 184 "184" + t185 185 "185" + t186 186 "186" + t187 187 "187" + t188 188 "188" + t189 189 "189" + t190 190 "190" + t191 191 "191" + t192 192 "192" + t193 193 "193" + t194 194 "194" + t195 195 "195" + t196 196 "196" + t197 197 "197" + t198 198 "198" + t199 199 "199" + t200 200 "200" + t201 201 "201" + t202 202 "202" + t203 203 "203" + t204 204 "204" + t205 205 "205" + t206 206 "206" + t207 207 "207" + t208 208 "208" + t209 209 "209" + t210 210 "210" + t211 211 "211" + t212 212 "212" + t213 213 "213" + t214 214 "214" + t215 215 "215" + t216 216 "216" + 
t217 217 "217" + t218 218 "218" + t219 219 "219" + t220 220 "220" + t221 221 "221" + t222 222 "222" + t223 223 "223" + t224 224 "224" + t225 225 "225" + t226 226 "226" + t227 227 "227" + t228 228 "228" + t229 229 "229" + t230 230 "230" + t231 231 "231" + t232 232 "232" + t233 233 "233" + t234 234 "234" + t235 235 "235" + t236 236 "236" + t237 237 "237" + t238 238 "238" + t239 239 "239" + t240 240 "240" + t241 241 "241" + t242 242 "242" + t243 243 "243" + t244 244 "244" + t245 245 "245" + t246 246 "246" + t247 247 "247" + t248 248 "248" + t249 249 "249" + t250 250 "250" + t251 251 "251" + t252 252 "252" + t253 253 "253" + t254 254 "254" + t255 255 "255" + t256 256 "256" + t257 257 "257" + t258 258 "258" + t259 259 "259" + t260 260 "260" + t261 261 "261" + t262 262 "262" + t263 263 "263" + t264 264 "264" + t265 265 "265" + t266 266 "266" + t267 267 "267" + t268 268 "268" + t269 269 "269" + t270 270 "270" + t271 271 "271" + t272 272 "272" + t273 273 "273" + t274 274 "274" + t275 275 "275" + t276 276 "276" + t277 277 "277" + t278 278 "278" + t279 279 "279" + t280 280 "280" + t281 281 "281" + t282 282 "282" + t283 283 "283" + t284 284 "284" + t285 285 "285" + t286 286 "286" + t287 287 "287" + t288 288 "288" + t289 289 "289" + t290 290 "290" + t291 291 "291" + t292 292 "292" + t293 293 "293" + t294 294 "294" + t295 295 "295" + t296 296 "296" + t297 297 "297" + t298 298 "298" + t299 299 "299" + t300 300 "300" + t301 301 "301" + t302 302 "302" + t303 303 "303" + t304 304 "304" + t305 305 "305" + t306 306 "306" + t307 307 "307" + t308 308 "308" + t309 309 "309" + t310 310 "310" + t311 311 "311" + t312 312 "312" + t313 313 "313" + t314 314 "314" + t315 315 "315" + t316 316 "316" + t317 317 "317" + t318 318 "318" + t319 319 "319" + t320 320 "320" + t321 321 "321" + t322 322 "322" + t323 323 "323" + t324 324 "324" + t325 325 "325" + t326 326 "326" + t327 327 "327" + t328 328 "328" + t329 329 "329" + t330 330 "330" + t331 331 "331" + t332 332 "332" + t333 333 "333" + t334 334 "334" + t335 335 "335" + t336 336 "336" + t337 337 "337" + t338 338 "338" + t339 339 "339" + t340 340 "340" + t341 341 "341" + t342 342 "342" + t343 343 "343" + t344 344 "344" + t345 345 "345" + t346 346 "346" + t347 347 "347" + t348 348 "348" + t349 349 "349" + t350 350 "350" + t351 351 "351" + t352 352 "352" + t353 353 "353" + t354 354 "354" + t355 355 "355" + t356 356 "356" + t357 357 "357" + t358 358 "358" + t359 359 "359" + t360 360 "360" + t361 361 "361" + t362 362 "362" + t363 363 "363" + t364 364 "364" + t365 365 "365" + t366 366 "366" + t367 367 "367" + t368 368 "368" + t369 369 "369" + t370 370 "370" + t371 371 "371" + t372 372 "372" + t373 373 "373" + t374 374 "374" + t375 375 "375" + t376 376 "376" + t377 377 "377" + t378 378 "378" + t379 379 "379" + t380 380 "380" + t381 381 "381" + t382 382 "382" + t383 383 "383" + t384 384 "384" + t385 385 "385" + t386 386 "386" + t387 387 "387" + t388 388 "388" + t389 389 "389" + t390 390 "390" + t391 391 "391" + t392 392 "392" + t393 393 "393" + t394 394 "394" + t395 395 "395" + t396 396 "396" + t397 397 "397" + t398 398 "398" + t399 399 "399" + t400 400 "400" + t401 401 "401" + t402 402 "402" + t403 403 "403" + t404 404 "404" + t405 405 "405" + t406 406 "406" + t407 407 "407" + t408 408 "408" + t409 409 "409" + t410 410 "410" + t411 411 "411" + t412 412 "412" + t413 413 "413" + t414 414 "414" + t415 415 "415" + t416 416 "416" + t417 417 "417" + t418 418 "418" + t419 419 "419" + t420 420 "420" + t421 421 "421" + t422 422 "422" + t423 423 "423" + t424 424 "424" + t425 425 "425" + 
t426 426 "426" + t427 427 "427" + t428 428 "428" + t429 429 "429" + t430 430 "430" + t431 431 "431" + t432 432 "432" + t433 433 "433" + t434 434 "434" + t435 435 "435" + t436 436 "436" + t437 437 "437" + t438 438 "438" + t439 439 "439" + t440 440 "440" + t441 441 "441" + t442 442 "442" + t443 443 "443" + t444 444 "444" + t445 445 "445" + t446 446 "446" + t447 447 "447" + t448 448 "448" + t449 449 "449" + t450 450 "450" + t451 451 "451" + t452 452 "452" + t453 453 "453" + t454 454 "454" + t455 455 "455" + t456 456 "456" + t457 457 "457" + t458 458 "458" + t459 459 "459" + t460 460 "460" + t461 461 "461" + t462 462 "462" + t463 463 "463" + t464 464 "464" + t465 465 "465" + t466 466 "466" + t467 467 "467" + t468 468 "468" + t469 469 "469" + t470 470 "470" + t471 471 "471" + t472 472 "472" + t473 473 "473" + t474 474 "474" + t475 475 "475" + t476 476 "476" + t477 477 "477" + t478 478 "478" + t479 479 "479" + t480 480 "480" + t481 481 "481" + t482 482 "482" + t483 483 "483" + t484 484 "484" + t485 485 "485" + t486 486 "486" + t487 487 "487" + t488 488 "488" + t489 489 "489" + t490 490 "490" + t491 491 "491" + t492 492 "492" + t493 493 "493" + t494 494 "494" + t495 495 "495" + t496 496 "496" + t497 497 "497" + t498 498 "498" + t499 499 "499" + t500 500 "500" + t501 501 "501" + t502 502 "502" + t503 503 "503" + t504 504 "504" + t505 505 "505" + t506 506 "506" + t507 507 "507" + t508 508 "508" + t509 509 "509" + t510 510 "510" + t511 511 "511" + t512 512 "512" + t513 513 "513" + t514 514 "514" + t515 515 "515" + t516 516 "516" + t517 517 "517" + t518 518 "518" + t519 519 "519" + t520 520 "520" + t521 521 "521" + t522 522 "522" + t523 523 "523" + t524 524 "524" + t525 525 "525" + t526 526 "526" + t527 527 "527" + t528 528 "528" + t529 529 "529" + t530 530 "530" + t531 531 "531" + t532 532 "532" + t533 533 "533" + t534 534 "534" + t535 535 "535" + t536 536 "536" + t537 537 "537" + t538 538 "538" + t539 539 "539" + t540 540 "540" + t541 541 "541" + t542 542 "542" + t543 543 "543" + t544 544 "544" + t545 545 "545" + t546 546 "546" + t547 547 "547" + t548 548 "548" + t549 549 "549" + t550 550 "550" + t551 551 "551" + t552 552 "552" + t553 553 "553" + t554 554 "554" + t555 555 "555" + t556 556 "556" + t557 557 "557" + t558 558 "558" + t559 559 "559" + t560 560 "560" + t561 561 "561" + t562 562 "562" + t563 563 "563" + t564 564 "564" + t565 565 "565" + t566 566 "566" + t567 567 "567" + t568 568 "568" + t569 569 "569" + t570 570 "570" + t571 571 "571" + t572 572 "572" + t573 573 "573" + t574 574 "574" + t575 575 "575" + t576 576 "576" + t577 577 "577" + t578 578 "578" + t579 579 "579" + t580 580 "580" + t581 581 "581" + t582 582 "582" + t583 583 "583" + t584 584 "584" + t585 585 "585" + t586 586 "586" + t587 587 "587" + t588 588 "588" + t589 589 "589" + t590 590 "590" + t591 591 "591" + t592 592 "592" + t593 593 "593" + t594 594 "594" + t595 595 "595" + t596 596 "596" + t597 597 "597" + t598 598 "598" + t599 599 "599" + t600 600 "600" + t601 601 "601" + t602 602 "602" + t603 603 "603" + t604 604 "604" + t605 605 "605" + t606 606 "606" + t607 607 "607" + t608 608 "608" + t609 609 "609" + t610 610 "610" + t611 611 "611" + t612 612 "612" + t613 613 "613" + t614 614 "614" + t615 615 "615" + t616 616 "616" + t617 617 "617" + t618 618 "618" + t619 619 "619" + t620 620 "620" + t621 621 "621" + t622 622 "622" + t623 623 "623" + t624 624 "624" + t625 625 "625" + t626 626 "626" + t627 627 "627" + t628 628 "628" + t629 629 "629" + t630 630 "630" + t631 631 "631" + t632 632 "632" + t633 633 "633" + t634 634 "634" + 
t635 635 "635" + t636 636 "636" + t637 637 "637" + t638 638 "638" + t639 639 "639" + t640 640 "640" + t641 641 "641" + t642 642 "642" + t643 643 "643" + t644 644 "644" + t645 645 "645" + t646 646 "646" + t647 647 "647" + t648 648 "648" + t649 649 "649" + t650 650 "650" + t651 651 "651" + t652 652 "652" + t653 653 "653" + t654 654 "654" + t655 655 "655" + t656 656 "656" + t657 657 "657" + t658 658 "658" + t659 659 "659" + t660 660 "660" + t661 661 "661" + t662 662 "662" + t663 663 "663" + t664 664 "664" + t665 665 "665" + t666 666 "666" + t667 667 "667" + t668 668 "668" + t669 669 "669" + t670 670 "670" + t671 671 "671" + t672 672 "672" + t673 673 "673" + t674 674 "674" + t675 675 "675" + t676 676 "676" + t677 677 "677" + t678 678 "678" + t679 679 "679" + t680 680 "680" + t681 681 "681" + t682 682 "682" + t683 683 "683" + t684 684 "684" + t685 685 "685" + t686 686 "686" + t687 687 "687" + t688 688 "688" + t689 689 "689" + t690 690 "690" + t691 691 "691" + t692 692 "692" + t693 693 "693" + t694 694 "694" + t695 695 "695" + t696 696 "696" + t697 697 "697" + t698 698 "698" + t699 699 "699" + t700 700 "700" + t701 701 "701" + t702 702 "702" + t703 703 "703" + t704 704 "704" + t705 705 "705" + t706 706 "706" + t707 707 "707" + t708 708 "708" + t709 709 "709" + t710 710 "710" + t711 711 "711" + t712 712 "712" + t713 713 "713" + t714 714 "714" + t715 715 "715" + t716 716 "716" + t717 717 "717" + t718 718 "718" + t719 719 "719" + t720 720 "720" + t721 721 "721" + t722 722 "722" + t723 723 "723" + t724 724 "724" + t725 725 "725" + t726 726 "726" + t727 727 "727" + t728 728 "728" + t729 729 "729" + t730 730 "730" + t731 731 "731" + t732 732 "732" + t733 733 "733" + t734 734 "734" + t735 735 "735" + t736 736 "736" + t737 737 "737" + t738 738 "738" + t739 739 "739" + t740 740 "740" + t741 741 "741" + t742 742 "742" + t743 743 "743" + t744 744 "744" + t745 745 "745" + t746 746 "746" + t747 747 "747" + t748 748 "748" + t749 749 "749" + t750 750 "750" + t751 751 "751" + t752 752 "752" + t753 753 "753" + t754 754 "754" + t755 755 "755" + t756 756 "756" + t757 757 "757" + t758 758 "758" + t759 759 "759" + t760 760 "760" + t761 761 "761" + t762 762 "762" + t763 763 "763" + t764 764 "764" + t765 765 "765" + t766 766 "766" + t767 767 "767" + t768 768 "768" + t769 769 "769" + t770 770 "770" + t771 771 "771" + t772 772 "772" + t773 773 "773" + t774 774 "774" + t775 775 "775" + t776 776 "776" + t777 777 "777" + t778 778 "778" + t779 779 "779" + t780 780 "780" + t781 781 "781" + t782 782 "782" + t783 783 "783" + t784 784 "784" + t785 785 "785" + t786 786 "786" + t787 787 "787" + t788 788 "788" + t789 789 "789" + t790 790 "790" + t791 791 "791" + t792 792 "792" + t793 793 "793" + t794 794 "794" + t795 795 "795" + t796 796 "796" + t797 797 "797" + t798 798 "798" + t799 799 "799" + t800 800 "800" + t801 801 "801" + t802 802 "802" + t803 803 "803" + t804 804 "804" + t805 805 "805" + t806 806 "806" + t807 807 "807" + t808 808 "808" + t809 809 "809" + t810 810 "810" + t811 811 "811" + t812 812 "812" + t813 813 "813" + t814 814 "814" + t815 815 "815" + t816 816 "816" + t817 817 "817" + t818 818 "818" + t819 819 "819" + t820 820 "820" + t821 821 "821" + t822 822 "822" + t823 823 "823" + t824 824 "824" + t825 825 "825" + t826 826 "826" + t827 827 "827" + t828 828 "828" + t829 829 "829" + t830 830 "830" + t831 831 "831" + t832 832 "832" + t833 833 "833" + t834 834 "834" + t835 835 "835" + t836 836 "836" + t837 837 "837" + t838 838 "838" + t839 839 "839" + t840 840 "840" + t841 841 "841" + t842 842 "842" + t843 843 "843" + 
t844 844 "844" + t845 845 "845" + t846 846 "846" + t847 847 "847" + t848 848 "848" + t849 849 "849" + t850 850 "850" + t851 851 "851" + t852 852 "852" + t853 853 "853" + t854 854 "854" + t855 855 "855" + t856 856 "856" + t857 857 "857" + t858 858 "858" + t859 859 "859" + t860 860 "860" + t861 861 "861" + t862 862 "862" + t863 863 "863" + t864 864 "864" + t865 865 "865" + t866 866 "866" + t867 867 "867" + t868 868 "868" + t869 869 "869" + t870 870 "870" + t871 871 "871" + t872 872 "872" + t873 873 "873" + t874 874 "874" + t875 875 "875" + t876 876 "876" + t877 877 "877" + t878 878 "878" + t879 879 "879" + t880 880 "880" + t881 881 "881" + t882 882 "882" + t883 883 "883" + t884 884 "884" + t885 885 "885" + t886 886 "886" + t887 887 "887" + t888 888 "888" + t889 889 "889" + t890 890 "890" + t891 891 "891" + t892 892 "892" + t893 893 "893" + t894 894 "894" + t895 895 "895" + t896 896 "896" + t897 897 "897" + t898 898 "898" + t899 899 "899" + t900 900 "900" + t901 901 "901" + t902 902 "902" + t903 903 "903" + t904 904 "904" + t905 905 "905" + t906 906 "906" + t907 907 "907" + t908 908 "908" + t909 909 "909" + t910 910 "910" + t911 911 "911" + t912 912 "912" + t913 913 "913" + t914 914 "914" + t915 915 "915" + t916 916 "916" + t917 917 "917" + t918 918 "918" + t919 919 "919" + t920 920 "920" + t921 921 "921" + t922 922 "922" + t923 923 "923" + t924 924 "924" + t925 925 "925" + t926 926 "926" + t927 927 "927" + t928 928 "928" + t929 929 "929" + t930 930 "930" + t931 931 "931" + t932 932 "932" + t933 933 "933" + t934 934 "934" + t935 935 "935" + t936 936 "936" + t937 937 "937" + t938 938 "938" + t939 939 "939" + t940 940 "940" + t941 941 "941" + t942 942 "942" + t943 943 "943" + t944 944 "944" + t945 945 "945" + t946 946 "946" + t947 947 "947" + t948 948 "948" + t949 949 "949" + t950 950 "950" + t951 951 "951" + t952 952 "952" + t953 953 "953" + t954 954 "954" + t955 955 "955" + t956 956 "956" + t957 957 "957" + t958 958 "958" + t959 959 "959" + t960 960 "960" + t961 961 "961" + t962 962 "962" + t963 963 "963" + t964 964 "964" + t965 965 "965" + t966 966 "966" + t967 967 "967" + t968 968 "968" + t969 969 "969" + t970 970 "970" + t971 971 "971" + t972 972 "972" + t973 973 "973" + t974 974 "974" + t975 975 "975" + t976 976 "976" + t977 977 "977" + t978 978 "978" + t979 979 "979" + t980 980 "980" + t981 981 "981" + t982 982 "982" + t983 983 "983" + t984 984 "984" + t985 985 "985" + t986 986 "986" + t987 987 "987" + t988 988 "988" + t989 989 "989" + t990 990 "990" + t991 991 "991" + t992 992 "992" + t993 993 "993" + t994 994 "994" + t995 995 "995" + t996 996 "996" + t997 997 "997" + t998 998 "998" + t999 999 "999" + t1000 1000 "1000" + +%% +exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" 
"160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208" + "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220" + "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232" + "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244" + "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256" + "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268" + "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280" + "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292" + "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304" + "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316" + "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328" + "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340" + "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352" + "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364" + "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376" + "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388" + "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400" + "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412" + "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424" + "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436" + "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448" + "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460" + "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472" + "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484" + "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496" + "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508" + "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520" + "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532" + "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544" + "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556" + "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568" + "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580" + "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592" + "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604" + "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616" + "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628" + "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640" + "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652" + "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664" + "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676" + "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688" + "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" "699" "700" + "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712" + "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724" + "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" 
"736" + "737" "738" "739" "740" "741" "742" "743" "744" "745" "746" "747" "748" + "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760" + "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772" + "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784" + "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796" + "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808" + "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820" + "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832" + "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844" + "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856" + "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868" + "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880" + "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892" + "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904" + "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916" + "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928" + "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940" + "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952" + "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964" + "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976" + "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988" + "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000" + ; +%% +#include + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int counter = 1; + if (counter <= MAX) + return counter++; + assert (counter++ == MAX + 1); + return 0; +} +#include /* getenv. */ +#include /* strcmp. 
*/ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2024-12-29 02:12:50.894517469 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +input: + | (- *) + (1 2) = 1 +./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2024-12-29 02:12:50.894517469 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found +./calc.at:1494: $PREPARSER ./calc input +610. torture.at:274: 609. torture.at:273: skipped (torture.at:273) + skipped (torture.at:274) +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ 
= nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1492: cat stderr + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 
+Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + +input: + | (1 + #) = 1111 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1494: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229961,7 +226940,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -229973,16 +226951,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1492: cat stderr stderr: Starting parse Entering state 0 @@ -229991,234 +226960,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' (1.17: ) Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -230231,28 +227068,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -./calc.at:1489: cat stderr -567. calc.at:1492: ok -./calc.at:1489: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () - -639. regression.at:1144: testing Dancer %glr-parser ... 
+input: + | (# + 1) = 1111 ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -230263,27 +227080,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1494: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1489: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -230291,98 +227090,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - 
$3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -230395,8 +227152,17 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1494: $PREPARSER ./calc input +611. torture.at:275: testing State number type: 65536 states ... +./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 stderr: +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2024-12-29 02:12:51.002513037 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found Starting parse Entering state 0 Reading a token @@ -230404,98 +227170,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -230512,501 +227236,466 @@ Starting parse Entering state 0 Reading a token 
-Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +611. torture.at:275: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (torture.at:275) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '*' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + + | 1 + 2 * 3 + !- ++ +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: cat stderr +613. torture.at:385: testing Many lookahead tokens ... +./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 +612. torture.at:276: testing State number type: 65537 states ... +./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 stderr: -640. regression.at:1145: testing Dancer lalr1.cc ... -./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y +input: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2024-12-29 02:12:51.042511396 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found + | (1 + # + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +612. 
torture.at:276: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + skipped (torture.at:276) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' () +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) 
+ $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231017,24 +227706,3180 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr +stdout: +%define parse.error verbose +%debug +%{ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. */ +#include + +# include +# include +# include +# define MAX 1000 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} +%union +{ + int val; +}; + +%type input exp +%token token +%type n1 n2 n3 n4 n5 n6 n7 n8 n9 n10 n11 n12 n13 n14 n15 n16 n17 n18 + n19 n20 n21 n22 n23 n24 n25 n26 n27 n28 n29 n30 n31 n32 n33 n34 + n35 n36 n37 n38 n39 n40 n41 n42 n43 n44 n45 n46 n47 n48 n49 n50 + n51 n52 n53 n54 n55 n56 n57 n58 n59 n60 n61 n62 n63 n64 n65 n66 + n67 n68 n69 n70 n71 n72 n73 n74 n75 n76 n77 n78 n79 n80 n81 n82 + n83 n84 n85 n86 n87 n88 n89 n90 n91 n92 n93 n94 n95 n96 n97 n98 + n99 n100 n101 n102 n103 n104 n105 n106 n107 n108 n109 n110 n111 + n112 n113 n114 n115 n116 n117 n118 n119 n120 n121 n122 n123 + n124 n125 n126 n127 n128 n129 n130 n131 n132 n133 n134 n135 + n136 n137 n138 n139 n140 n141 n142 n143 n144 n145 n146 n147 + n148 n149 n150 n151 n152 n153 n154 n155 n156 n157 n158 n159 + n160 n161 n162 n163 n164 n165 n166 n167 n168 n169 n170 n171 + n172 n173 n174 n175 n176 n177 n178 n179 n180 n181 n182 n183 + n184 n185 n186 n187 n188 n189 n190 n191 n192 n193 n194 n195 + n196 n197 n198 n199 n200 n201 n202 n203 n204 n205 n206 n207 + n208 n209 n210 n211 n212 n213 n214 n215 n216 n217 n218 n219 + n220 n221 n222 n223 n224 n225 n226 n227 n228 n229 n230 n231 + n232 n233 n234 n235 n236 n237 n238 n239 n240 n241 n242 n243 + n244 n245 n246 n247 n248 n249 n250 n251 n252 n253 n254 n255 + n256 n257 n258 n259 n260 n261 n262 n263 n264 n265 n266 n267 + n268 n269 n270 n271 n272 n273 n274 n275 n276 n277 n278 n279 + n280 n281 n282 n283 n284 n285 n286 n287 n288 n289 n290 n291 + n292 n293 n294 n295 n296 n297 n298 n299 n300 n301 n302 n303 + n304 n305 n306 n307 n308 n309 n310 n311 n312 n313 n314 n315 + n316 n317 n318 n319 n320 n321 n322 n323 n324 n325 n326 n327 + n328 n329 n330 n331 n332 n333 n334 n335 n336 n337 n338 n339 + n340 n341 n342 n343 n344 n345 n346 n347 n348 n349 n350 n351 + n352 n353 n354 n355 n356 n357 n358 n359 n360 n361 n362 n363 + n364 n365 n366 n367 n368 n369 n370 n371 n372 n373 n374 n375 + n376 n377 n378 n379 n380 n381 n382 n383 n384 n385 n386 n387 + n388 n389 n390 n391 n392 n393 n394 n395 n396 n397 n398 n399 + n400 n401 n402 n403 n404 n405 n406 n407 n408 n409 n410 n411 + n412 n413 n414 n415 n416 n417 n418 n419 n420 n421 n422 n423 + n424 n425 n426 n427 n428 n429 n430 n431 n432 n433 n434 n435 + n436 n437 n438 n439 n440 n441 n442 n443 n444 n445 n446 n447 + n448 n449 n450 n451 n452 n453 n454 n455 n456 n457 n458 n459 + n460 n461 n462 n463 n464 n465 n466 n467 n468 n469 n470 n471 + 
n472 n473 n474 n475 n476 n477 n478 n479 n480 n481 n482 n483 + n484 n485 n486 n487 n488 n489 n490 n491 n492 n493 n494 n495 + n496 n497 n498 n499 n500 n501 n502 n503 n504 n505 n506 n507 + n508 n509 n510 n511 n512 n513 n514 n515 n516 n517 n518 n519 + n520 n521 n522 n523 n524 n525 n526 n527 n528 n529 n530 n531 + n532 n533 n534 n535 n536 n537 n538 n539 n540 n541 n542 n543 + n544 n545 n546 n547 n548 n549 n550 n551 n552 n553 n554 n555 + n556 n557 n558 n559 n560 n561 n562 n563 n564 n565 n566 n567 + n568 n569 n570 n571 n572 n573 n574 n575 n576 n577 n578 n579 + n580 n581 n582 n583 n584 n585 n586 n587 n588 n589 n590 n591 + n592 n593 n594 n595 n596 n597 n598 n599 n600 n601 n602 n603 + n604 n605 n606 n607 n608 n609 n610 n611 n612 n613 n614 n615 + n616 n617 n618 n619 n620 n621 n622 n623 n624 n625 n626 n627 + n628 n629 n630 n631 n632 n633 n634 n635 n636 n637 n638 n639 + n640 n641 n642 n643 n644 n645 n646 n647 n648 n649 n650 n651 + n652 n653 n654 n655 n656 n657 n658 n659 n660 n661 n662 n663 + n664 n665 n666 n667 n668 n669 n670 n671 n672 n673 n674 n675 + n676 n677 n678 n679 n680 n681 n682 n683 n684 n685 n686 n687 + n688 n689 n690 n691 n692 n693 n694 n695 n696 n697 n698 n699 + n700 n701 n702 n703 n704 n705 n706 n707 n708 n709 n710 n711 + n712 n713 n714 n715 n716 n717 n718 n719 n720 n721 n722 n723 + n724 n725 n726 n727 n728 n729 n730 n731 n732 n733 n734 n735 + n736 n737 n738 n739 n740 n741 n742 n743 n744 n745 n746 n747 + n748 n749 n750 n751 n752 n753 n754 n755 n756 n757 n758 n759 + n760 n761 n762 n763 n764 n765 n766 n767 n768 n769 n770 n771 + n772 n773 n774 n775 n776 n777 n778 n779 n780 n781 n782 n783 + n784 n785 n786 n787 n788 n789 n790 n791 n792 n793 n794 n795 + n796 n797 n798 n799 n800 n801 n802 n803 n804 n805 n806 n807 + n808 n809 n810 n811 n812 n813 n814 n815 n816 n817 n818 n819 + n820 n821 n822 n823 n824 n825 n826 n827 n828 n829 n830 n831 + n832 n833 n834 n835 n836 n837 n838 n839 n840 n841 n842 n843 + n844 n845 n846 n847 n848 n849 n850 n851 n852 n853 n854 n855 + n856 n857 n858 n859 n860 n861 n862 n863 n864 n865 n866 n867 + n868 n869 n870 n871 n872 n873 n874 n875 n876 n877 n878 n879 + n880 n881 n882 n883 n884 n885 n886 n887 n888 n889 n890 n891 + n892 n893 n894 n895 n896 n897 n898 n899 n900 n901 n902 n903 + n904 n905 n906 n907 n908 n909 n910 n911 n912 n913 n914 n915 + n916 n917 n918 n919 n920 n921 n922 n923 n924 n925 n926 n927 + n928 n929 n930 n931 n932 n933 n934 n935 n936 n937 n938 n939 + n940 n941 n942 n943 n944 n945 n946 n947 n948 n949 n950 n951 + n952 n953 n954 n955 n956 n957 n958 n959 n960 n961 n962 n963 + n964 n965 n966 n967 n968 n969 n970 n971 n972 n973 n974 n975 + n976 n977 n978 n979 n980 n981 n982 n983 n984 n985 n986 n987 + n988 n989 n990 n991 n992 n993 n994 n995 n996 n997 n998 n999 + n1000 +%token + t1 1 "1" + t2 2 "2" + t3 3 "3" + t4 4 "4" + t5 5 "5" + t6 6 "6" + t7 7 "7" + t8 8 "8" + t9 9 "9" + t10 10 "10" + t11 11 "11" + t12 12 "12" + t13 13 "13" + t14 14 "14" + t15 15 "15" + t16 16 "16" + t17 17 "17" + t18 18 "18" + t19 19 "19" + t20 20 "20" + t21 21 "21" + t22 22 "22" + t23 23 "23" + t24 24 "24" + t25 25 "25" + t26 26 "26" + t27 27 "27" + t28 28 "28" + t29 29 "29" + t30 30 "30" + t31 31 "31" + t32 32 "32" + t33 33 "33" + t34 34 "34" + t35 35 "35" + t36 36 "36" + t37 37 "37" + t38 38 "38" + t39 39 "39" + t40 40 "40" + t41 41 "41" + t42 42 "42" + t43 43 "43" + t44 44 "44" + t45 45 "45" + t46 46 "46" + t47 47 "47" + t48 48 "48" + t49 49 "49" + t50 50 "50" + t51 51 "51" + t52 52 "52" + t53 53 "53" + t54 54 "54" + t55 55 "55" + t56 56 "56" + t57 57 "57" + t58 58 "58" + t59 59 "59" + t60 60 "60" 
+ t61 61 "61" + t62 62 "62" + t63 63 "63" + t64 64 "64" + t65 65 "65" + t66 66 "66" + t67 67 "67" + t68 68 "68" + t69 69 "69" + t70 70 "70" + t71 71 "71" + t72 72 "72" + t73 73 "73" + t74 74 "74" + t75 75 "75" + t76 76 "76" + t77 77 "77" + t78 78 "78" + t79 79 "79" + t80 80 "80" + t81 81 "81" + t82 82 "82" + t83 83 "83" + t84 84 "84" + t85 85 "85" + t86 86 "86" + t87 87 "87" + t88 88 "88" + t89 89 "89" + t90 90 "90" + t91 91 "91" + t92 92 "92" + t93 93 "93" + t94 94 "94" + t95 95 "95" + t96 96 "96" + t97 97 "97" + t98 98 "98" + t99 99 "99" + t100 100 "100" + t101 101 "101" + t102 102 "102" + t103 103 "103" + t104 104 "104" + t105 105 "105" + t106 106 "106" + t107 107 "107" + t108 108 "108" + t109 109 "109" + t110 110 "110" + t111 111 "111" + t112 112 "112" + t113 113 "113" + t114 114 "114" + t115 115 "115" + t116 116 "116" + t117 117 "117" + t118 118 "118" + t119 119 "119" + t120 120 "120" + t121 121 "121" + t122 122 "122" + t123 123 "123" + t124 124 "124" + t125 125 "125" + t126 126 "126" + t127 127 "127" + t128 128 "128" + t129 129 "129" + t130 130 "130" + t131 131 "131" + t132 132 "132" + t133 133 "133" + t134 134 "134" + t135 135 "135" + t136 136 "136" + t137 137 "137" + t138 138 "138" + t139 139 "139" + t140 140 "140" + t141 141 "141" + t142 142 "142" + t143 143 "143" + t144 144 "144" + t145 145 "145" + t146 146 "146" + t147 147 "147" + t148 148 "148" + t149 149 "149" + t150 150 "150" + t151 151 "151" + t152 152 "152" + t153 153 "153" + t154 154 "154" + t155 155 "155" + t156 156 "156" + t157 157 "157" + t158 158 "158" + t159 159 "159" + t160 160 "160" + t161 161 "161" + t162 162 "162" + t163 163 "163" + t164 164 "164" + t165 165 "165" + t166 166 "166" + t167 167 "167" + t168 168 "168" + t169 169 "169" + t170 170 "170" + t171 171 "171" + t172 172 "172" + t173 173 "173" + t174 174 "174" + t175 175 "175" + t176 176 "176" + t177 177 "177" + t178 178 "178" + t179 179 "179" + t180 180 "180" + t181 181 "181" + t182 182 "182" + t183 183 "183" + t184 184 "184" + t185 185 "185" + t186 186 "186" + t187 187 "187" + t188 188 "188" + t189 189 "189" + t190 190 "190" + t191 191 "191" + t192 192 "192" + t193 193 "193" + t194 194 "194" + t195 195 "195" + t196 196 "196" + t197 197 "197" + t198 198 "198" + t199 199 "199" + t200 200 "200" + t201 201 "201" + t202 202 "202" + t203 203 "203" + t204 204 "204" + t205 205 "205" + t206 206 "206" + t207 207 "207" + t208 208 "208" + t209 209 "209" + t210 210 "210" + t211 211 "211" + t212 212 "212" + t213 213 "213" + t214 214 "214" + t215 215 "215" + t216 216 "216" + t217 217 "217" + t218 218 "218" + t219 219 "219" + t220 220 "220" + t221 221 "221" + t222 222 "222" + t223 223 "223" + t224 224 "224" + t225 225 "225" + t226 226 "226" + t227 227 "227" + t228 228 "228" + t229 229 "229" + t230 230 "230" + t231 231 "231" + t232 232 "232" + t233 233 "233" + t234 234 "234" + t235 235 "235" + t236 236 "236" + t237 237 "237" + t238 238 "238" + t239 239 "239" + t240 240 "240" + t241 241 "241" + t242 242 "242" + t243 243 "243" + t244 244 "244" + t245 245 "245" + t246 246 "246" + t247 247 "247" + t248 248 "248" + t249 249 "249" + t250 250 "250" + t251 251 "251" + t252 252 "252" + t253 253 "253" + t254 254 "254" + t255 255 "255" + t256 256 "256" + t257 257 "257" + t258 258 "258" + t259 259 "259" + t260 260 "260" + t261 261 "261" + t262 262 "262" + t263 263 "263" + t264 264 "264" + t265 265 "265" + t266 266 "266" + t267 267 "267" + t268 268 "268" + t269 269 "269" + t270 270 "270" + t271 271 "271" + t272 272 "272" + t273 273 "273" + t274 274 "274" + t275 275 "275" + t276 276 "276" 
+ t277 277 "277" + t278 278 "278" + t279 279 "279" + t280 280 "280" + t281 281 "281" + t282 282 "282" + t283 283 "283" + t284 284 "284" + t285 285 "285" + t286 286 "286" + t287 287 "287" + t288 288 "288" + t289 289 "289" + t290 290 "290" + t291 291 "291" + t292 292 "292" + t293 293 "293" + t294 294 "294" + t295 295 "295" + t296 296 "296" + t297 297 "297" + t298 298 "298" + t299 299 "299" + t300 300 "300" + t301 301 "301" + t302 302 "302" + t303 303 "303" + t304 304 "304" + t305 305 "305" + t306 306 "306" + t307 307 "307" + t308 308 "308" + t309 309 "309" + t310 310 "310" + t311 311 "311" + t312 312 "312" + t313 313 "313" + t314 314 "314" + t315 315 "315" + t316 316 "316" + t317 317 "317" + t318 318 "318" + t319 319 "319" + t320 320 "320" + t321 321 "321" + t322 322 "322" + t323 323 "323" + t324 324 "324" + t325 325 "325" + t326 326 "326" + t327 327 "327" + t328 328 "328" + t329 329 "329" + t330 330 "330" + t331 331 "331" + t332 332 "332" + t333 333 "333" + t334 334 "334" + t335 335 "335" + t336 336 "336" + t337 337 "337" + t338 338 "338" + t339 339 "339" + t340 340 "340" + t341 341 "341" + t342 342 "342" + t343 343 "343" + t344 344 "344" + t345 345 "345" + t346 346 "346" + t347 347 "347" + t348 348 "348" + t349 349 "349" + t350 350 "350" + t351 351 "351" + t352 352 "352" + t353 353 "353" + t354 354 "354" + t355 355 "355" + t356 356 "356" + t357 357 "357" + t358 358 "358" + t359 359 "359" + t360 360 "360" + t361 361 "361" + t362 362 "362" + t363 363 "363" + t364 364 "364" + t365 365 "365" + t366 366 "366" + t367 367 "367" + t368 368 "368" + t369 369 "369" + t370 370 "370" + t371 371 "371" + t372 372 "372" + t373 373 "373" + t374 374 "374" + t375 375 "375" + t376 376 "376" + t377 377 "377" + t378 378 "378" + t379 379 "379" + t380 380 "380" + t381 381 "381" + t382 382 "382" + t383 383 "383" + t384 384 "384" + t385 385 "385" + t386 386 "386" + t387 387 "387" + t388 388 "388" + t389 389 "389" + t390 390 "390" + t391 391 "391" + t392 392 "392" + t393 393 "393" + t394 394 "394" + t395 395 "395" + t396 396 "396" + t397 397 "397" + t398 398 "398" + t399 399 "399" + t400 400 "400" + t401 401 "401" + t402 402 "402" + t403 403 "403" + t404 404 "404" + t405 405 "405" + t406 406 "406" + t407 407 "407" + t408 408 "408" + t409 409 "409" + t410 410 "410" + t411 411 "411" + t412 412 "412" + t413 413 "413" + t414 414 "414" + t415 415 "415" + t416 416 "416" + t417 417 "417" + t418 418 "418" + t419 419 "419" + t420 420 "420" + t421 421 "421" + t422 422 "422" + t423 423 "423" + t424 424 "424" + t425 425 "425" + t426 426 "426" + t427 427 "427" + t428 428 "428" + t429 429 "429" + t430 430 "430" + t431 431 "431" + t432 432 "432" + t433 433 "433" + t434 434 "434" + t435 435 "435" + t436 436 "436" + t437 437 "437" + t438 438 "438" + t439 439 "439" + t440 440 "440" + t441 441 "441" + t442 442 "442" + t443 443 "443" + t444 444 "444" + t445 445 "445" + t446 446 "446" + t447 447 "447" + t448 448 "448" + t449 449 "449" + t450 450 "450" + t451 451 "451" + t452 452 "452" + t453 453 "453" + t454 454 "454" + t455 455 "455" + t456 456 "456" + t457 457 "457" + t458 458 "458" + t459 459 "459" + t460 460 "460" + t461 461 "461" + t462 462 "462" + t463 463 "463" + t464 464 "464" + t465 465 "465" + t466 466 "466" + t467 467 "467" + t468 468 "468" + t469 469 "469" + t470 470 "470" + t471 471 "471" + t472 472 "472" + t473 473 "473" + t474 474 "474" + t475 475 "475" + t476 476 "476" + t477 477 "477" + t478 478 "478" + t479 479 "479" + t480 480 "480" + t481 481 "481" + t482 482 "482" + t483 483 "483" + t484 484 "484" + t485 485 "485" + 
t486 486 "486" + t487 487 "487" + t488 488 "488" + t489 489 "489" + t490 490 "490" + t491 491 "491" + t492 492 "492" + t493 493 "493" + t494 494 "494" + t495 495 "495" + t496 496 "496" + t497 497 "497" + t498 498 "498" + t499 499 "499" + t500 500 "500" + t501 501 "501" + t502 502 "502" + t503 503 "503" + t504 504 "504" + t505 505 "505" + t506 506 "506" + t507 507 "507" + t508 508 "508" + t509 509 "509" + t510 510 "510" + t511 511 "511" + t512 512 "512" + t513 513 "513" + t514 514 "514" + t515 515 "515" + t516 516 "516" + t517 517 "517" + t518 518 "518" + t519 519 "519" + t520 520 "520" + t521 521 "521" + t522 522 "522" + t523 523 "523" + t524 524 "524" + t525 525 "525" + t526 526 "526" + t527 527 "527" + t528 528 "528" + t529 529 "529" + t530 530 "530" + t531 531 "531" + t532 532 "532" + t533 533 "533" + t534 534 "534" + t535 535 "535" + t536 536 "536" + t537 537 "537" + t538 538 "538" + t539 539 "539" + t540 540 "540" + t541 541 "541" + t542 542 "542" + t543 543 "543" + t544 544 "544" + t545 545 "545" + t546 546 "546" + t547 547 "547" + t548 548 "548" + t549 549 "549" + t550 550 "550" + t551 551 "551" + t552 552 "552" + t553 553 "553" + t554 554 "554" + t555 555 "555" + t556 556 "556" + t557 557 "557" + t558 558 "558" + t559 559 "559" + t560 560 "560" + t561 561 "561" + t562 562 "562" + t563 563 "563" + t564 564 "564" + t565 565 "565" + t566 566 "566" + t567 567 "567" + t568 568 "568" + t569 569 "569" + t570 570 "570" + t571 571 "571" + t572 572 "572" + t573 573 "573" + t574 574 "574" + t575 575 "575" + t576 576 "576" + t577 577 "577" + t578 578 "578" + t579 579 "579" + t580 580 "580" + t581 581 "581" + t582 582 "582" + t583 583 "583" + t584 584 "584" + t585 585 "585" + t586 586 "586" + t587 587 "587" + t588 588 "588" + t589 589 "589" + t590 590 "590" + t591 591 "591" + t592 592 "592" + t593 593 "593" + t594 594 "594" + t595 595 "595" + t596 596 "596" + t597 597 "597" + t598 598 "598" + t599 599 "599" + t600 600 "600" + t601 601 "601" + t602 602 "602" + t603 603 "603" + t604 604 "604" + t605 605 "605" + t606 606 "606" + t607 607 "607" + t608 608 "608" + t609 609 "609" + t610 610 "610" + t611 611 "611" + t612 612 "612" + t613 613 "613" + t614 614 "614" + t615 615 "615" + t616 616 "616" + t617 617 "617" + t618 618 "618" + t619 619 "619" + t620 620 "620" + t621 621 "621" + t622 622 "622" + t623 623 "623" + t624 624 "624" + t625 625 "625" + t626 626 "626" + t627 627 "627" + t628 628 "628" + t629 629 "629" + t630 630 "630" + t631 631 "631" + t632 632 "632" + t633 633 "633" + t634 634 "634" + t635 635 "635" + t636 636 "636" + t637 637 "637" + t638 638 "638" + t639 639 "639" + t640 640 "640" + t641 641 "641" + t642 642 "642" + t643 643 "643" + t644 644 "644" + t645 645 "645" + t646 646 "646" + t647 647 "647" + t648 648 "648" + t649 649 "649" + t650 650 "650" + t651 651 "651" + t652 652 "652" + t653 653 "653" + t654 654 "654" + t655 655 "655" + t656 656 "656" + t657 657 "657" + t658 658 "658" + t659 659 "659" + t660 660 "660" + t661 661 "661" + t662 662 "662" + t663 663 "663" + t664 664 "664" + t665 665 "665" + t666 666 "666" + t667 667 "667" + t668 668 "668" + t669 669 "669" + t670 670 "670" + t671 671 "671" + t672 672 "672" + t673 673 "673" + t674 674 "674" + t675 675 "675" + t676 676 "676" + t677 677 "677" + t678 678 "678" + t679 679 "679" + t680 680 "680" + t681 681 "681" + t682 682 "682" + t683 683 "683" + t684 684 "684" + t685 685 "685" + t686 686 "686" + t687 687 "687" + t688 688 "688" + t689 689 "689" + t690 690 "690" + t691 691 "691" + t692 692 "692" + t693 693 "693" + t694 694 "694" + 
t695 695 "695" + t696 696 "696" + t697 697 "697" + t698 698 "698" + t699 699 "699" + t700 700 "700" + t701 701 "701" + t702 702 "702" + t703 703 "703" + t704 704 "704" + t705 705 "705" + t706 706 "706" + t707 707 "707" + t708 708 "708" + t709 709 "709" + t710 710 "710" + t711 711 "711" + t712 712 "712" + t713 713 "713" + t714 714 "714" + t715 715 "715" + t716 716 "716" + t717 717 "717" + t718 718 "718" + t719 719 "719" + t720 720 "720" + t721 721 "721" + t722 722 "722" + t723 723 "723" + t724 724 "724" + t725 725 "725" + t726 726 "726" + t727 727 "727" + t728 728 "728" + t729 729 "729" + t730 730 "730" + t731 731 "731" + t732 732 "732" + t733 733 "733" + t734 734 "734" + t735 735 "735" + t736 736 "736" + t737 737 "737" + t738 738 "738" + t739 739 "739" + t740 740 "740" + t741 741 "741" + t742 742 "742" + t743 743 "743" + t744 744 "744" + t745 745 "745" + t746 746 "746" + t747 747 "747" + t748 748 "748" + t749 749 "749" + t750 750 "750" + t751 751 "751" + t752 752 "752" + t753 753 "753" + t754 754 "754" + t755 755 "755" + t756 756 "756" + t757 757 "757" + t758 758 "758" + t759 759 "759" + t760 760 "760" + t761 761 "761" + t762 762 "762" + t763 763 "763" + t764 764 "764" + t765 765 "765" + t766 766 "766" + t767 767 "767" + t768 768 "768" + t769 769 "769" + t770 770 "770" + t771 771 "771" + t772 772 "772" + t773 773 "773" + t774 774 "774" + t775 775 "775" + t776 776 "776" + t777 777 "777" + t778 778 "778" + t779 779 "779" + t780 780 "780" + t781 781 "781" + t782 782 "782" + t783 783 "783" + t784 784 "784" + t785 785 "785" + t786 786 "786" + t787 787 "787" + t788 788 "788" + t789 789 "789" + t790 790 "790" + t791 791 "791" + t792 792 "792" + t793 793 "793" + t794 794 "794" + t795 795 "795" + t796 796 "796" + t797 797 "797" + t798 798 "798" + t799 799 "799" + t800 800 "800" + t801 801 "801" + t802 802 "802" + t803 803 "803" + t804 804 "804" + t805 805 "805" + t806 806 "806" + t807 807 "807" + t808 808 "808" + t809 809 "809" + t810 810 "810" + t811 811 "811" + t812 812 "812" + t813 813 "813" + t814 814 "814" + t815 815 "815" + t816 816 "816" + t817 817 "817" + t818 818 "818" + t819 819 "819" + t820 820 "820" + t821 821 "821" + t822 822 "822" + t823 823 "823" + t824 824 "824" + t825 825 "825" + t826 826 "826" + t827 827 "827" + t828 828 "828" + t829 829 "829" + t830 830 "830" + t831 831 "831" + t832 832 "832" + t833 833 "833" + t834 834 "834" + t835 835 "835" + t836 836 "836" + t837 837 "837" + t838 838 "838" + t839 839 "839" + t840 840 "840" + t841 841 "841" + t842 842 "842" + t843 843 "843" + t844 844 "844" + t845 845 "845" + t846 846 "846" + t847 847 "847" + t848 848 "848" + t849 849 "849" + t850 850 "850" + t851 851 "851" + t852 852 "852" + t853 853 "853" + t854 854 "854" + t855 855 "855" + t856 856 "856" + t857 857 "857" + t858 858 "858" + t859 859 "859" + t860 860 "860" + t861 861 "861" + t862 862 "862" + t863 863 "863" + t864 864 "864" + t865 865 "865" + t866 866 "866" + t867 867 "867" + t868 868 "868" + t869 869 "869" + t870 870 "870" + t871 871 "871" + t872 872 "872" + t873 873 "873" + t874 874 "874" + t875 875 "875" + t876 876 "876" + t877 877 "877" + t878 878 "878" + t879 879 "879" + t880 880 "880" + t881 881 "881" + t882 882 "882" + t883 883 "883" + t884 884 "884" + t885 885 "885" + t886 886 "886" + t887 887 "887" + t888 888 "888" + t889 889 "889" + t890 890 "890" + t891 891 "891" + t892 892 "892" + t893 893 "893" + t894 894 "894" + t895 895 "895" + t896 896 "896" + t897 897 "897" + t898 898 "898" + t899 899 "899" + t900 900 "900" + t901 901 "901" + t902 902 "902" + t903 903 "903" + 
t904 904 "904" + t905 905 "905" + t906 906 "906" + t907 907 "907" + t908 908 "908" + t909 909 "909" + t910 910 "910" + t911 911 "911" + t912 912 "912" + t913 913 "913" + t914 914 "914" + t915 915 "915" + t916 916 "916" + t917 917 "917" + t918 918 "918" + t919 919 "919" + t920 920 "920" + t921 921 "921" + t922 922 "922" + t923 923 "923" + t924 924 "924" + t925 925 "925" + t926 926 "926" + t927 927 "927" + t928 928 "928" + t929 929 "929" + t930 930 "930" + t931 931 "931" + t932 932 "932" + t933 933 "933" + t934 934 "934" + t935 935 "935" + t936 936 "936" + t937 937 "937" + t938 938 "938" + t939 939 "939" + t940 940 "940" + t941 941 "941" + t942 942 "942" + t943 943 "943" + t944 944 "944" + t945 945 "945" + t946 946 "946" + t947 947 "947" + t948 948 "948" + t949 949 "949" + t950 950 "950" + t951 951 "951" + t952 952 "952" + t953 953 "953" + t954 954 "954" + t955 955 "955" + t956 956 "956" + t957 957 "957" + t958 958 "958" + t959 959 "959" + t960 960 "960" + t961 961 "961" + t962 962 "962" + t963 963 "963" + t964 964 "964" + t965 965 "965" + t966 966 "966" + t967 967 "967" + t968 968 "968" + t969 969 "969" + t970 970 "970" + t971 971 "971" + t972 972 "972" + t973 973 "973" + t974 974 "974" + t975 975 "975" + t976 976 "976" + t977 977 "977" + t978 978 "978" + t979 979 "979" + t980 980 "980" + t981 981 "981" + t982 982 "982" + t983 983 "983" + t984 984 "984" + t985 985 "985" + t986 986 "986" + t987 987 "987" + t988 988 "988" + t989 989 "989" + t990 990 "990" + t991 991 "991" + t992 992 "992" + t993 993 "993" + t994 994 "994" + t995 995 "995" + t996 996 "996" + t997 997 "997" + t998 998 "998" + t999 999 "999" + t1000 1000 "1000" +%% +input: + exp { assert ($1 == 1); $$ = $1; } +| input exp { assert ($2 == $1 + 1); $$ = $2; } +; + +exp: + n1 "1" { assert ($1 == 1); $$ = $1; } +| n2 "2" { assert ($1 == 2); $$ = $1; } +| n3 "3" { assert ($1 == 3); $$ = $1; } +| n4 "4" { assert ($1 == 4); $$ = $1; } +| n5 "5" { assert ($1 == 5); $$ = $1; } +| n6 "6" { assert ($1 == 6); $$ = $1; } +| n7 "7" { assert ($1 == 7); $$ = $1; } +| n8 "8" { assert ($1 == 8); $$ = $1; } +| n9 "9" { assert ($1 == 9); $$ = $1; } +| n10 "10" { assert ($1 == 10); $$ = $1; } +| n11 "11" { assert ($1 == 11); $$ = $1; } +| n12 "12" { assert ($1 == 12); $$ = $1; } +| n13 "13" { assert ($1 == 13); $$ = $1; } +| n14 "14" { assert ($1 == 14); $$ = $1; } +| n15 "15" { assert ($1 == 15); $$ = $1; } +| n16 "16" { assert ($1 == 16); $$ = $1; } +| n17 "17" { assert ($1 == 17); $$ = $1; } +| n18 "18" { assert ($1 == 18); $$ = $1; } +| n19 "19" { assert ($1 == 19); $$ = $1; } +| n20 "20" { assert ($1 == 20); $$ = $1; } +| n21 "21" { assert ($1 == 21); $$ = $1; } +| n22 "22" { assert ($1 == 22); $$ = $1; } +| n23 "23" { assert ($1 == 23); $$ = $1; } +| n24 "24" { assert ($1 == 24); $$ = $1; } +| n25 "25" { assert ($1 == 25); $$ = $1; } +| n26 "26" { assert ($1 == 26); $$ = $1; } +| n27 "27" { assert ($1 == 27); $$ = $1; } +| n28 "28" { assert ($1 == 28); $$ = $1; } +| n29 "29" { assert ($1 == 29); $$ = $1; } +| n30 "30" { assert ($1 == 30); $$ = $1; } +| n31 "31" { assert ($1 == 31); $$ = $1; } +| n32 "32" { assert ($1 == 32); $$ = $1; } +| n33 "33" { assert ($1 == 33); $$ = $1; } +| n34 "34" { assert ($1 == 34); $$ = $1; } +| n35 "35" { assert ($1 == 35); $$ = $1; } +| n36 "36" { assert ($1 == 36); $$ = $1; } +| n37 "37" { assert ($1 == 37); $$ = $1; } +| n38 "38" { assert ($1 == 38); $$ = $1; } +| n39 "39" { assert ($1 == 39); $$ = $1; } +| n40 "40" { assert ($1 == 40); $$ = $1; } +| n41 "41" { assert ($1 == 41); $$ = $1; } +| n42 "42" { 
assert ($1 == 42); $$ = $1; } +| n43 "43" { assert ($1 == 43); $$ = $1; } +| n44 "44" { assert ($1 == 44); $$ = $1; } +| n45 "45" { assert ($1 == 45); $$ = $1; } +| n46 "46" { assert ($1 == 46); $$ = $1; } +| n47 "47" { assert ($1 == 47); $$ = $1; } +| n48 "48" { assert ($1 == 48); $$ = $1; } +| n49 "49" { assert ($1 == 49); $$ = $1; } +| n50 "50" { assert ($1 == 50); $$ = $1; } +| n51 "51" { assert ($1 == 51); $$ = $1; } +| n52 "52" { assert ($1 == 52); $$ = $1; } +| n53 "53" { assert ($1 == 53); $$ = $1; } +| n54 "54" { assert ($1 == 54); $$ = $1; } +| n55 "55" { assert ($1 == 55); $$ = $1; } +| n56 "56" { assert ($1 == 56); $$ = $1; } +| n57 "57" { assert ($1 == 57); $$ = $1; } +| n58 "58" { assert ($1 == 58); $$ = $1; } +| n59 "59" { assert ($1 == 59); $$ = $1; } +| n60 "60" { assert ($1 == 60); $$ = $1; } +| n61 "61" { assert ($1 == 61); $$ = $1; } +| n62 "62" { assert ($1 == 62); $$ = $1; } +| n63 "63" { assert ($1 == 63); $$ = $1; } +| n64 "64" { assert ($1 == 64); $$ = $1; } +| n65 "65" { assert ($1 == 65); $$ = $1; } +| n66 "66" { assert ($1 == 66); $$ = $1; } +| n67 "67" { assert ($1 == 67); $$ = $1; } +| n68 "68" { assert ($1 == 68); $$ = $1; } +| n69 "69" { assert ($1 == 69); $$ = $1; } +| n70 "70" { assert ($1 == 70); $$ = $1; } +| n71 "71" { assert ($1 == 71); $$ = $1; } +| n72 "72" { assert ($1 == 72); $$ = $1; } +| n73 "73" { assert ($1 == 73); $$ = $1; } +| n74 "74" { assert ($1 == 74); $$ = $1; } +| n75 "75" { assert ($1 == 75); $$ = $1; } +| n76 "76" { assert ($1 == 76); $$ = $1; } +| n77 "77" { assert ($1 == 77); $$ = $1; } +| n78 "78" { assert ($1 == 78); $$ = $1; } +| n79 "79" { assert ($1 == 79); $$ = $1; } +| n80 "80" { assert ($1 == 80); $$ = $1; } +| n81 "81" { assert ($1 == 81); $$ = $1; } +| n82 "82" { assert ($1 == 82); $$ = $1; } +| n83 "83" { assert ($1 == 83); $$ = $1; } +| n84 "84" { assert ($1 == 84); $$ = $1; } +| n85 "85" { assert ($1 == 85); $$ = $1; } +| n86 "86" { assert ($1 == 86); $$ = $1; } +| n87 "87" { assert ($1 == 87); $$ = $1; } +| n88 "88" { assert ($1 == 88); $$ = $1; } +| n89 "89" { assert ($1 == 89); $$ = $1; } +| n90 "90" { assert ($1 == 90); $$ = $1; } +| n91 "91" { assert ($1 == 91); $$ = $1; } +| n92 "92" { assert ($1 == 92); $$ = $1; } +| n93 "93" { assert ($1 == 93); $$ = $1; } +| n94 "94" { assert ($1 == 94); $$ = $1; } +| n95 "95" { assert ($1 == 95); $$ = $1; } +| n96 "96" { assert ($1 == 96); $$ = $1; } +| n97 "97" { assert ($1 == 97); $$ = $1; } +| n98 "98" { assert ($1 == 98); $$ = $1; } +| n99 "99" { assert ($1 == 99); $$ = $1; } +| n100 "100" { assert ($1 == 100); $$ = $1; } +| n101 "101" { assert ($1 == 101); $$ = $1; } +| n102 "102" { assert ($1 == 102); $$ = $1; } +| n103 "103" { assert ($1 == 103); $$ = $1; } +| n104 "104" { assert ($1 == 104); $$ = $1; } +| n105 "105" { assert ($1 == 105); $$ = $1; } +| n106 "106" { assert ($1 == 106); $$ = $1; } +| n107 "107" { assert ($1 == 107); $$ = $1; } +| n108 "108" { assert ($1 == 108); $$ = $1; } +| n109 "109" { assert ($1 == 109); $$ = $1; } +| n110 "110" { assert ($1 == 110); $$ = $1; } +| n111 "111" { assert ($1 == 111); $$ = $1; } +| n112 "112" { assert ($1 == 112); $$ = $1; } +| n113 "113" { assert ($1 == 113); $$ = $1; } +| n114 "114" { assert ($1 == 114); $$ = $1; } +| n115 "115" { assert ($1 == 115); $$ = $1; } +| n116 "116" { assert ($1 == 116); $$ = $1; } +| n117 "117" { assert ($1 == 117); $$ = $1; } +| n118 "118" { assert ($1 == 118); $$ = $1; } +| n119 "119" { assert ($1 == 119); $$ = $1; } +| n120 "120" { assert ($1 == 120); $$ = $1; } +| n121 "121" { assert ($1 == 
121); $$ = $1; } +| n122 "122" { assert ($1 == 122); $$ = $1; } +| n123 "123" { assert ($1 == 123); $$ = $1; } +| n124 "124" { assert ($1 == 124); $$ = $1; } +| n125 "125" { assert ($1 == 125); $$ = $1; } +| n126 "126" { assert ($1 == 126); $$ = $1; } +| n127 "127" { assert ($1 == 127); $$ = $1; } +| n128 "128" { assert ($1 == 128); $$ = $1; } +| n129 "129" { assert ($1 == 129); $$ = $1; } +| n130 "130" { assert ($1 == 130); $$ = $1; } +| n131 "131" { assert ($1 == 131); $$ = $1; } +| n132 "132" { assert ($1 == 132); $$ = $1; } +| n133 "133" { assert ($1 == 133); $$ = $1; } +| n134 "134" { assert ($1 == 134); $$ = $1; } +| n135 "135" { assert ($1 == 135); $$ = $1; } +| n136 "136" { assert ($1 == 136); $$ = $1; } +| n137 "137" { assert ($1 == 137); $$ = $1; } +| n138 "138" { assert ($1 == 138); $$ = $1; } +| n139 "139" { assert ($1 == 139); $$ = $1; } +| n140 "140" { assert ($1 == 140); $$ = $1; } +| n141 "141" { assert ($1 == 141); $$ = $1; } +| n142 "142" { assert ($1 == 142); $$ = $1; } +| n143 "143" { assert ($1 == 143); $$ = $1; } +| n144 "144" { assert ($1 == 144); $$ = $1; } +| n145 "145" { assert ($1 == 145); $$ = $1; } +| n146 "146" { assert ($1 == 146); $$ = $1; } +| n147 "147" { assert ($1 == 147); $$ = $1; } +| n148 "148" { assert ($1 == 148); $$ = $1; } +| n149 "149" { assert ($1 == 149); $$ = $1; } +| n150 "150" { assert ($1 == 150); $$ = $1; } +| n151 "151" { assert ($1 == 151); $$ = $1; } +| n152 "152" { assert ($1 == 152); $$ = $1; } +| n153 "153" { assert ($1 == 153); $$ = $1; } +| n154 "154" { assert ($1 == 154); $$ = $1; } +| n155 "155" { assert ($1 == 155); $$ = $1; } +| n156 "156" { assert ($1 == 156); $$ = $1; } +| n157 "157" { assert ($1 == 157); $$ = $1; } +| n158 "158" { assert ($1 == 158); $$ = $1; } +| n159 "159" { assert ($1 == 159); $$ = $1; } +| n160 "160" { assert ($1 == 160); $$ = $1; } +| n161 "161" { assert ($1 == 161); $$ = $1; } +| n162 "162" { assert ($1 == 162); $$ = $1; } +| n163 "163" { assert ($1 == 163); $$ = $1; } +| n164 "164" { assert ($1 == 164); $$ = $1; } +| n165 "165" { assert ($1 == 165); $$ = $1; } +| n166 "166" { assert ($1 == 166); $$ = $1; } +| n167 "167" { assert ($1 == 167); $$ = $1; } +| n168 "168" { assert ($1 == 168); $$ = $1; } +| n169 "169" { assert ($1 == 169); $$ = $1; } +| n170 "170" { assert ($1 == 170); $$ = $1; } +| n171 "171" { assert ($1 == 171); $$ = $1; } +| n172 "172" { assert ($1 == 172); $$ = $1; } +| n173 "173" { assert ($1 == 173); $$ = $1; } +| n174 "174" { assert ($1 == 174); $$ = $1; } +| n175 "175" { assert ($1 == 175); $$ = $1; } +| n176 "176" { assert ($1 == 176); $$ = $1; } +| n177 "177" { assert ($1 == 177); $$ = $1; } +| n178 "178" { assert ($1 == 178); $$ = $1; } +| n179 "179" { assert ($1 == 179); $$ = $1; } +| n180 "180" { assert ($1 == 180); $$ = $1; } +| n181 "181" { assert ($1 == 181); $$ = $1; } +| n182 "182" { assert ($1 == 182); $$ = $1; } +| n183 "183" { assert ($1 == 183); $$ = $1; } +| n184 "184" { assert ($1 == 184); $$ = $1; } +| n185 "185" { assert ($1 == 185); $$ = $1; } +| n186 "186" { assert ($1 == 186); $$ = $1; } +| n187 "187" { assert ($1 == 187); $$ = $1; } +| n188 "188" { assert ($1 == 188); $$ = $1; } +| n189 "189" { assert ($1 == 189); $$ = $1; } +| n190 "190" { assert ($1 == 190); $$ = $1; } +| n191 "191" { assert ($1 == 191); $$ = $1; } +| n192 "192" { assert ($1 == 192); $$ = $1; } +| n193 "193" { assert ($1 == 193); $$ = $1; } +| n194 "194" { assert ($1 == 194); $$ = $1; } +| n195 "195" { assert ($1 == 195); $$ = $1; } +| n196 "196" { assert ($1 == 196); $$ = $1; } +| n197 
"197" { assert ($1 == 197); $$ = $1; } +| n198 "198" { assert ($1 == 198); $$ = $1; } +| n199 "199" { assert ($1 == 199); $$ = $1; } +| n200 "200" { assert ($1 == 200); $$ = $1; } +| n201 "201" { assert ($1 == 201); $$ = $1; } +| n202 "202" { assert ($1 == 202); $$ = $1; } +| n203 "203" { assert ($1 == 203); $$ = $1; } +| n204 "204" { assert ($1 == 204); $$ = $1; } +| n205 "205" { assert ($1 == 205); $$ = $1; } +| n206 "206" { assert ($1 == 206); $$ = $1; } +| n207 "207" { assert ($1 == 207); $$ = $1; } +| n208 "208" { assert ($1 == 208); $$ = $1; } +| n209 "209" { assert ($1 == 209); $$ = $1; } +| n210 "210" { assert ($1 == 210); $$ = $1; } +| n211 "211" { assert ($1 == 211); $$ = $1; } +| n212 "212" { assert ($1 == 212); $$ = $1; } +| n213 "213" { assert ($1 == 213); $$ = $1; } +| n214 "214" { assert ($1 == 214); $$ = $1; } +| n215 "215" { assert ($1 == 215); $$ = $1; } +| n216 "216" { assert ($1 == 216); $$ = $1; } +| n217 "217" { assert ($1 == 217); $$ = $1; } +| n218 "218" { assert ($1 == 218); $$ = $1; } +| n219 "219" { assert ($1 == 219); $$ = $1; } +| n220 "220" { assert ($1 == 220); $$ = $1; } +| n221 "221" { assert ($1 == 221); $$ = $1; } +| n222 "222" { assert ($1 == 222); $$ = $1; } +| n223 "223" { assert ($1 == 223); $$ = $1; } +| n224 "224" { assert ($1 == 224); $$ = $1; } +| n225 "225" { assert ($1 == 225); $$ = $1; } +| n226 "226" { assert ($1 == 226); $$ = $1; } +| n227 "227" { assert ($1 == 227); $$ = $1; } +| n228 "228" { assert ($1 == 228); $$ = $1; } +| n229 "229" { assert ($1 == 229); $$ = $1; } +| n230 "230" { assert ($1 == 230); $$ = $1; } +| n231 "231" { assert ($1 == 231); $$ = $1; } +| n232 "232" { assert ($1 == 232); $$ = $1; } +| n233 "233" { assert ($1 == 233); $$ = $1; } +| n234 "234" { assert ($1 == 234); $$ = $1; } +| n235 "235" { assert ($1 == 235); $$ = $1; } +| n236 "236" { assert ($1 == 236); $$ = $1; } +| n237 "237" { assert ($1 == 237); $$ = $1; } +| n238 "238" { assert ($1 == 238); $$ = $1; } +| n239 "239" { assert ($1 == 239); $$ = $1; } +| n240 "240" { assert ($1 == 240); $$ = $1; } +| n241 "241" { assert ($1 == 241); $$ = $1; } +| n242 "242" { assert ($1 == 242); $$ = $1; } +| n243 "243" { assert ($1 == 243); $$ = $1; } +| n244 "244" { assert ($1 == 244); $$ = $1; } +| n245 "245" { assert ($1 == 245); $$ = $1; } +| n246 "246" { assert ($1 == 246); $$ = $1; } +| n247 "247" { assert ($1 == 247); $$ = $1; } +| n248 "248" { assert ($1 == 248); $$ = $1; } +| n249 "249" { assert ($1 == 249); $$ = $1; } +| n250 "250" { assert ($1 == 250); $$ = $1; } +| n251 "251" { assert ($1 == 251); $$ = $1; } +| n252 "252" { assert ($1 == 252); $$ = $1; } +| n253 "253" { assert ($1 == 253); $$ = $1; } +| n254 "254" { assert ($1 == 254); $$ = $1; } +| n255 "255" { assert ($1 == 255); $$ = $1; } +| n256 "256" { assert ($1 == 256); $$ = $1; } +| n257 "257" { assert ($1 == 257); $$ = $1; } +| n258 "258" { assert ($1 == 258); $$ = $1; } +| n259 "259" { assert ($1 == 259); $$ = $1; } +| n260 "260" { assert ($1 == 260); $$ = $1; } +| n261 "261" { assert ($1 == 261); $$ = $1; } +| n262 "262" { assert ($1 == 262); $$ = $1; } +| n263 "263" { assert ($1 == 263); $$ = $1; } +| n264 "264" { assert ($1 == 264); $$ = $1; } +| n265 "265" { assert ($1 == 265); $$ = $1; } +| n266 "266" { assert ($1 == 266); $$ = $1; } +| n267 "267" { assert ($1 == 267); $$ = $1; } +| n268 "268" { assert ($1 == 268); $$ = $1; } +| n269 "269" { assert ($1 == 269); $$ = $1; } +| n270 "270" { assert ($1 == 270); $$ = $1; } +| n271 "271" { assert ($1 == 271); $$ = $1; } +| n272 "272" { assert ($1 == 272); 
$$ = $1; } +| n273 "273" { assert ($1 == 273); $$ = $1; } +| n274 "274" { assert ($1 == 274); $$ = $1; } +| n275 "275" { assert ($1 == 275); $$ = $1; } +| n276 "276" { assert ($1 == 276); $$ = $1; } +| n277 "277" { assert ($1 == 277); $$ = $1; } +| n278 "278" { assert ($1 == 278); $$ = $1; } +| n279 "279" { assert ($1 == 279); $$ = $1; } +| n280 "280" { assert ($1 == 280); $$ = $1; } +| n281 "281" { assert ($1 == 281); $$ = $1; } +| n282 "282" { assert ($1 == 282); $$ = $1; } +| n283 "283" { assert ($1 == 283); $$ = $1; } +| n284 "284" { assert ($1 == 284); $$ = $1; } +| n285 "285" { assert ($1 == 285); $$ = $1; } +| n286 "286" { assert ($1 == 286); $$ = $1; } +| n287 "287" { assert ($1 == 287); $$ = $1; } +| n288 "288" { assert ($1 == 288); $$ = $1; } +| n289 "289" { assert ($1 == 289); $$ = $1; } +| n290 "290" { assert ($1 == 290); $$ = $1; } +| n291 "291" { assert ($1 == 291); $$ = $1; } +| n292 "292" { assert ($1 == 292); $$ = $1; } +| n293 "293" { assert ($1 == 293); $$ = $1; } +| n294 "294" { assert ($1 == 294); $$ = $1; } +| n295 "295" { assert ($1 == 295); $$ = $1; } +| n296 "296" { assert ($1 == 296); $$ = $1; } +| n297 "297" { assert ($1 == 297); $$ = $1; } +| n298 "298" { assert ($1 == 298); $$ = $1; } +| n299 "299" { assert ($1 == 299); $$ = $1; } +| n300 "300" { assert ($1 == 300); $$ = $1; } +| n301 "301" { assert ($1 == 301); $$ = $1; } +| n302 "302" { assert ($1 == 302); $$ = $1; } +| n303 "303" { assert ($1 == 303); $$ = $1; } +| n304 "304" { assert ($1 == 304); $$ = $1; } +| n305 "305" { assert ($1 == 305); $$ = $1; } +| n306 "306" { assert ($1 == 306); $$ = $1; } +| n307 "307" { assert ($1 == 307); $$ = $1; } +| n308 "308" { assert ($1 == 308); $$ = $1; } +| n309 "309" { assert ($1 == 309); $$ = $1; } +| n310 "310" { assert ($1 == 310); $$ = $1; } +| n311 "311" { assert ($1 == 311); $$ = $1; } +| n312 "312" { assert ($1 == 312); $$ = $1; } +| n313 "313" { assert ($1 == 313); $$ = $1; } +| n314 "314" { assert ($1 == 314); $$ = $1; } +| n315 "315" { assert ($1 == 315); $$ = $1; } +| n316 "316" { assert ($1 == 316); $$ = $1; } +| n317 "317" { assert ($1 == 317); $$ = $1; } +| n318 "318" { assert ($1 == 318); $$ = $1; } +| n319 "319" { assert ($1 == 319); $$ = $1; } +| n320 "320" { assert ($1 == 320); $$ = $1; } +| n321 "321" { assert ($1 == 321); $$ = $1; } +| n322 "322" { assert ($1 == 322); $$ = $1; } +| n323 "323" { assert ($1 == 323); $$ = $1; } +| n324 "324" { assert ($1 == 324); $$ = $1; } +| n325 "325" { assert ($1 == 325); $$ = $1; } +| n326 "326" { assert ($1 == 326); $$ = $1; } +| n327 "327" { assert ($1 == 327); $$ = $1; } +| n328 "328" { assert ($1 == 328); $$ = $1; } +| n329 "329" { assert ($1 == 329); $$ = $1; } +| n330 "330" { assert ($1 == 330); $$ = $1; } +| n331 "331" { assert ($1 == 331); $$ = $1; } +| n332 "332" { assert ($1 == 332); $$ = $1; } +| n333 "333" { assert ($1 == 333); $$ = $1; } +| n334 "334" { assert ($1 == 334); $$ = $1; } +| n335 "335" { assert ($1 == 335); $$ = $1; } +| n336 "336" { assert ($1 == 336); $$ = $1; } +| n337 "337" { assert ($1 == 337); $$ = $1; } +| n338 "338" { assert ($1 == 338); $$ = $1; } +| n339 "339" { assert ($1 == 339); $$ = $1; } +| n340 "340" { assert ($1 == 340); $$ = $1; } +| n341 "341" { assert ($1 == 341); $$ = $1; } +| n342 "342" { assert ($1 == 342); $$ = $1; } +| n343 "343" { assert ($1 == 343); $$ = $1; } +| n344 "344" { assert ($1 == 344); $$ = $1; } +| n345 "345" { assert ($1 == 345); $$ = $1; } +| n346 "346" { assert ($1 == 346); $$ = $1; } +| n347 "347" { assert ($1 == 347); $$ = $1; } +| n348 "348" { 
assert ($1 == 348); $$ = $1; } +| n349 "349" { assert ($1 == 349); $$ = $1; } +| n350 "350" { assert ($1 == 350); $$ = $1; } +| n351 "351" { assert ($1 == 351); $$ = $1; } +| n352 "352" { assert ($1 == 352); $$ = $1; } +| n353 "353" { assert ($1 == 353); $$ = $1; } +| n354 "354" { assert ($1 == 354); $$ = $1; } +| n355 "355" { assert ($1 == 355); $$ = $1; } +| n356 "356" { assert ($1 == 356); $$ = $1; } +| n357 "357" { assert ($1 == 357); $$ = $1; } +| n358 "358" { assert ($1 == 358); $$ = $1; } +| n359 "359" { assert ($1 == 359); $$ = $1; } +| n360 "360" { assert ($1 == 360); $$ = $1; } +| n361 "361" { assert ($1 == 361); $$ = $1; } +| n362 "362" { assert ($1 == 362); $$ = $1; } +| n363 "363" { ./calc.at:1494: cat stderr +assert ($1 == 363); $$ = $1; } +| n364 "364" { assert ($1 == 364); $$ = $1; } +| n365 "365" { assert ($1 == 365); $$ = $1; } +| n366 "366" { assert ($1 == 366); $$ = $1; } +| n367 "367" { assert ($1 == 367); $$ = $1; } +| n368 "368" { assert ($1 == 368); $$ = $1; } +| n369 "369" { assert ($1 == 369); $$ = $1; } +| n370 "370" { assert ($1 == 370); $$ = $1; } +| n371 "371" { assert ($1 == 371); $$ = $1; } +| n372 "372" { assert ($1 == 372); $$ = $1; } +| n373 "373" { assert ($1 == 373); $$ = $1; } +| n374 "374" { assert ($1 == 374); $$ = $1; } +| n375 "375" { assert ($1 == 375); $$ = $1; } +| n376 "376" { assert ($1 == 376); $$ = $1; } +| n377 "377" { assert ($1 == 377); $$ = $1; } +| n378 "378" { assert ($1 == 378); $$ = $1; } +| n379 "379" { assert ($1 == 379); $$ = $1; } +| n380 "380" { assert ($1 == 380); $$ = $1; } +| n381 "381" { assert ($1 == 381); $$ = $1; } +| n382 "382" { assert ($1 == 382); $$ = $1; } +| n383 "383" { assert ($1 == 383); $$ = $1; } +| n384 "384" { assert ($1 == 384); $$ = $1; } +| n385 "385" { assert ($1 == 385); $$ = $1; } +| n386 "386" { assert ($1 == 386); $$ = $1; } +| n387 "387" { assert ($1 == 387); $$ = $1; } +| n388 "388" { assert ($1 == 388); $$ = $1; } +| n389 "389" { assert ($1 == 389); $$ = $1; } +| n390 "390" { assert ($1 == 390); $$ = $1; } +| n391 "391" { assert ($1 == 391); $$ = $1; } +| n392 "392" { assert ($1 == 392); $$ = $1; } +| n393 "393" { assert ($1 == 393); $$ = $1; } +| n394 "394" { assert ($1 == 394); $$ = $1; } +| n395 "395" { assert ($1 == 395); $$ = $1; } +| n396 "396" { assert ($1 == 396); $$ = $1; } +| n397 "397" { assert ($1 == 397); $$ = $1; } +| n398 "398" { assert ($1 == 398); $$ = $1; } +| n399 "399" { assert ($1 == 399); $$ = $1; } +| n400 "400" { assert ($1 == 400); $$ = $1; } +| n401 "401" { assert ($1 == 401); $$ = $1; } +| n402 "402" { assert ($1 == 402); $$ = $1; } +| n403 "403" { assert ($1 == 403); $$ = $1; } +| n404 "404" { assert ($1 == 404); $$ = $1; } +| n405 "405" { assert ($1 == 405); $$ = $1; } +| n406 "406" { assert ($1 == 406); $$ = $1; } +| n407 "407" { assert ($1 == 407); $$ = $1; } +| n408 "408" { assert ($1 == 408); $$ = $1; } +| n409 "409" { assert ($1 == 409); $$ = $1; } +| n410 "410" { assert ($1 == 410); $$ = $1; } +| n411 "411" { assert ($1 == 411); $$ = $1; } +| n412 "412" { assert ($1 == 412); $$ = $1; } +| n413 "413" { assert ($1 == 413); $$ = $1; } +| n414 "414" { assert ($1 == 414); $$ = $1; } +| n415 "415" { assert ($1 == 415); $$ = $1; } +| n416 "416" { assert ($1 == 416); $$ = $1; } +| n417 "417" { assert ($1 == 417); $$ = $1; } +| n418 "418" { assert ($1 == 418); $$ = $1; } +| n419 "419" { assert ($1 == 419); $$ = $1; } +| n420 "420" { assert ($1 == 420); $$ = $1; } +| n421 "421" { assert ($1 == 421); $$ = $1; } +| n422 "422" { assert ($1 == 422); $$ = $1; } +| n423 "423" { 
assert ($1 == 423); $$ = $1; } +| n424 "424" { assert ($1 == 424); $$ = $1; } +| n425 "425" { assert ($1 == 425); $$ = $1; } +| n426 "426" { assert ($1 == 426); $$ = $1; } +| n427 "427" { assert ($1 == 427); $$ = $1; } +| n428 "428" { assert ($1 == 428); $$ = $1; } +| n429 "429" { assert ($1 == 429); $$ = $1; } +| n430 "430" { assert ($1 == 430); $$ = $1; } +| n431 "431" { assert ($1 == 431); $$ = $1; } +| n432 "432" { assert ($1 == 432); $$ = $1; } +| n433 "433" { assert ($1 == 433); $$ = $1; } +| n434 "434" { assert ($1 == 434); $$ = $1; } +| n435 "435" { assert ($1 == 435); $$ = $1; } +| n436 "436" { assert ($1 == 436); $$ = $1; } +| n437 "437" { assert ($1 == 437); $$ = $1; } +| n438 "438" { assert ($1 == 438); $$ = $1; } +| n439 "439" { assert ($1 == 439); $$ = $1; } +| n440 "440" { assert ($1 == 440); $$ = $1; } +| n441 "441" { assert ($1 == 441); $$ = $1; } +| n442 "442" { assert ($1 == 442); $$ = $1; } +| n443 "443" { assert ($1 == 443); $$ = $1; } +| n444 "444" { assert ($1 == 444); $$ = $1; } +| n445 "445" { assert ($1 == 445); $$ = $1; } +| n446 "446" { assert ($1 == 446); $$ = $1; } +| n447 "447" { assert ($1 == 447); $$ = $1; } +| n448 "448" { assert ($1 == 448); $$ = $1; } +| n449 "449" { assert ($1 == 449); $$ = $1; } +| n450 "450" { assert ($1 == 450); $$ = $1; } +| n451 "451" { assert ($1 == 451); $$ = $1; } +| n452 "452" { assert ($1 == 452); $$ = $1; } +| n453 "453" { assert ($1 == 453); $$ = $1; } +| n454 "454" { assert ($1 == 454); $$ = $1; } +| n455 "455" { assert ($1 == 455); $$ = $1; } +| n456 "456" { assert ($1 == 456); $$ = $1; } +| n457 "457" { assert ($1 == 457); $$ = $1; } +| n458 "458" { assert ($1 == 458); $$ = $1; } +| n459 "459" { assert ($1 == 459); $$ = $1; } +| n460 "460" { assert ($1 == 460); $$ = $1; } +| n461 "461" { assert ($1 == 461); $$ = $1; } +| n462 "462" { assert ($1 == 462); $$ = $1; } +| n463 "463" { assert ($1 == 463); $$ = $1; } +| n464 "464" { assert ($1 == 464); $$ = $1; } +| n465 "465" { assert ($1 == 465); $$ = $1; } +| n466 "466" { assert ($1 == 466); $$ = $1; } +| n467 "467" { assert ($1 == 467); $$ = $1; } +| n468 "468" { assert ($1 == 468); $$ = $1; } +| n469 "469" { assert ($1 == 469); $$ = $1; } +| n470 "470" { assert ($1 == 470); $$ = $1; } +| n471 "471" { assert ($1 == 471); $$ = $1; } +| n472 "472" { assert ($1 == 472); $$ = $1; } +| n473 "473" { assert ($1 == 473); $$ = $1; } +| n474 "474" { assert ($1 == 474); $$ = $1; } +| n475 "475" { assert ($1 == 475); $$ = $1; } +| n476 "476" { assert ($1 == 476); $$ = $1; } +| n477 "477" { assert ($1 == 477); $$ = $1; } +| n478 "478" { assert ($1 == 478); $$ = $1; } +| n479 "479" { assert ($1 == 479); $$ = $1; } +| n480 "480" { assert ($1 == 480); $$ = $1; } +| n481 "481" { assert ($1 == 481); $$ = $1; } +| n482 "482" { assert ($1 == 482); $$ = $1; } +| n483 "483" { assert ($1 == 483); $$ = $1; } +| n484 "484" { assert ($1 == 484); $$ = $1; } +| n485 "485" { assert ($1 == 485); $$ = $1; } +| n486 "486" { assert ($1 == 486); $$ = $1; } +| n487 "487" { assert ($1 == 487); $$ = $1; } +| n488 "488" { assert ($1 == 488); $$ = $1; } +| n489 "489" { assert ($1 == 489); $$ = $1; } +| n490 "490" { assert ($1 == 490); $$ = $1; } +| n491 "491" { assert ($1 == 491); $$ = $1; } +| n492 "492" { assert ($1 == 492); $$ = $1; } +| n493 "493" { assert ($1 == 493); $$ = $1; } +| n494 "494" { assert ($1 == 494); $$ = $1; } +| n495 "495" { assert ($1 == 495); $$ = $1; } +| n496 "496" { assert ($1 == 496); $$ = $1; } +| n497 "497" { assert ($1 == 497); $$ = $1; } +| n498 "498" { assert ($1 == 498); $$ = $1; 
} +| n499 "499" { assert ($1 == 499); $$ = $1; } +| n500 "500" { assert ($1 == 500); $$ = $1; } +| n501 "501" { assert ($1 == 501); $$ = $1; } +| n502 "502" { assert ($1 == 502); $$ = $1; } +| n503 "503" { assert ($1 == 503); $$ = $1; } +| n504 "504" { assert ($1 == 504); $$ = $1; } +| n505 "505" { assert ($1 == 505); $$ = $1; } +| n506 "506" { assert ($1 == 506); $$ = $1; } +| n507 "507" { assert ($1 == 507); $$ = $1; } +| n508 "508" { assert ($1 == 508); $$ = $1; } +| n509 "509" { assert ($1 == 509); $$ = $1; } +| n510 "510" { assert ($1 == 510); $$ = $1; } +| n511 "511" { assert ($1 == 511); $$ = $1; } +| n512 "512" { assert ($1 == 512); $$ = $1; } +| n513 "513" { assert ($1 == 513); $$ = $1; } +| n514 "514" { assert ($1 == 514); $$ = $1; } +| n515 "515" { assert ($1 == 515); $$ = $1; } +| n516 "516" { assert ($1 == 516); $$ = $1; } +| n517 "517" { assert ($1 == 517); $$ = $1; } +| n518 "518" { assert ($1 == 518); $$ = $1; } +| n519 "519" { assert ($1 == 519); $$ = $1; } +| n520 "520" { assert ($1 == 520); $$ = $1; } +| n521 "521" { assert ($1 == 521); $$ = $1; } +| n522 "522" { assert ($1 == 522); $$ = $1; } +| n523 "523" { assert ($1 == 523); $$ = $1; } +| n524 "524" { assert ($1 == 524); $$ = $1; } +| n525 "525" { assert ($1 == 525); $$ = $1; } +| n526 "526" { assert ($1 == 526); $$ = $1; } +| n527 "527" { assert ($1 == 527); $$ = $1; } +| n528 "528" { assert ($1 == 528); $$ = $1; } +| n529 "529" { assert ($1 == 529); $$ = $1; } +| n530 "530" { assert ($1 == 530); $$ = $1; } +| n531 "531" { assert ($1 == 531); $$ = $1; } +| n532 "532" { assert ($1 == 532); $$ = $1; } +| n533 "533" { assert ($1 == 533); $$ = $1; } +| n534 "534" { assert ($1 == 534); $$ = $1; } +| n535 "535" { assert ($1 == 535); $$ = $1; } +| n536 "536" { assert ($1 == 536); $$ = $1; } +| n537 "537" { assert ($1 == 537); $$ = $1; } +| n538 "538" { assert ($1 == 538); $$ = $1; } +| n539 "539" { assert ($1 == 539); $$ = $1; } +| n540 "540" { assert ($1 == 540); $$ = $1; } +| n541 "541" { assert ($1 == 541); $$ = $1; } +| n542 "542" { assert ($1 == 542); $$ = $1; } +| n543 "543" { assert ($1 == 543); $$ = $1; } +| n544 "544" { assert ($1 == 544); $$ = $1; } +| n545 "545" { assert ($1 == 545); $$ = $1; } +| n546 "546" { assert ($1 == 546); $$ = $1; } +| n547 "547" { assert ($1 == 547); $$ = $1; } +| n548 "548" { assert ($1 == 548); $$ = $1; } +| n549 "549" { assert ($1 == 549); $$ = $1; } +| n550 "550" { assert ($1 == 550); $$ = $1; } +| n551 "551" { assert ($1 == 551); $$ = $1; } +| n552 "552" { assert ($1 == 552); $$ = $1; } +| n553 "553" { assert ($1 == 553); $$ = $1; } +| n554 "554" { assert ($1 == 554); $$ = $1; } +| n555 "555" { assert ($1 == 555); $$ = $1; } +| n556 "556" { assert ($1 == 556); $$ = $1; } +| n557 "557" { assert ($1 == 557); $$ = $1; } +| n558 "558" { assert ($1 == 558); $$ = $1; } +| n559 "559" { assert ($1 == 559); $$ = $1; } +| n560 "560" { assert ($1 == 560); $$ = $1; } +| n561 "561" { assert ($1 == 561); $$ = $1; } +| n562 "562" { assert ($1 == 562); $$ = $1; } +| n563 "563" { assert ($1 == 563); $$ = $1; } +| n564 "564" { assert ($1 == 564); $$ = $1; } +| n565 "565" { assert ($1 == 565); $$ = $1; } +| n566 "566" { assert ($1 == 566); $$ = $1; } +| n567 "567" { assert ($1 == 567); $$ = $1; } +| n568 "568" { assert ($1 == 568); $$ = $1; } +| n569 "569" { assert ($1 == 569); $$ = $1; } +| n570 "570" { assert ($1 == 570); $$ = $1; } +| n571 "571" { assert ($1 == 571); $$ = $1; } +| n572 "572" { assert ($1 == 572); $$ = $1; } +| n573 "573" { assert ($1 == 573); $$ = $1; } +| n574 "574" { assert ($1 
== 574); $$ = $1; } +| n575 "575" { assert ($1 == 575); $$ = $1; } +| n576 "576" { assert ($1 == 576); $$ = $1; } +| n577 "577" { assert ($1 == 577); $$ = $1; } +| n578 "578" { assert ($1 == 578); $$ = $1; } +| n579 "579" { assert ($1 == 579); $$ = $1; } +| n580 "580" { assert ($1 == 580); $$ = $1; } +| n581 "581" { assert ($1 == 581); $$ = $1; } +| n582 "582" { assert ($1 == 582); $$ = $1; } +| n583 "583" { assert ($1 == 583); $$ = $1; } +| n584 "584" { assert ($1 == 584); $$ = $1; } +| n585 "585" { assert ($1 == 585); $$ = $1; } +| n586 "586" { assert ($1 == 586); $$ = $1; } +| n587 "587" { assert ($1 == 587); $$ = $1; } +| n588 "588" { assert ($1 == 588); $$ = $1; } +| n589 "589" { assert ($1 == 589); $$ = $1; } +| n590 "590" { assert ($1 == 590); $$ = $1; } +| n591 "591" { assert ($1 == 591); $$ = $1; } +| n592 "592" { assert ($1 == 592); $$ = $1; } +| n593 "593" { assert ($1 == 593); $$ = $1; } +| n594 "594" { assert ($1 == 594); $$ = $1; } +| n595 "595" { assert ($1 == 595); $$ = $1; } +| n596 "596" { assert ($1 == 596); $$ = $1; } +| n597 "597" { assert ($1 == 597); $$ = $1; } +| n598 "598" { assert ($1 == 598); $$ = $1; } +| n599 "599" { assert ($1 == 599); $$ = $1; } +| n600 "600" { assert ($1 == 600); $$ = $1; } +| n601 "601" { assert ($1 == 601); $$ = $1; } +| n602 "602" { assert ($1 == 602); $$ = $1; } +| n603 "603" { assert ($1 == 603); $$ = $1; } +| n604 "604" { assert ($1 == 604); $$ = $1; } +| n605 "605" { assert ($1 == 605); $$ = $1; } +| n606 "606" { assert ($1 == 606); $$ = $1; } +| n607 "607" { assert ($1 == 607); $$ = $1; } +| n608 "608" { assert ($1 == 608); $$ = $1; } +| n609 "609" { assert ($1 == 609); $$ = $1; } +| n610 "610" { assert ($1 == 610); $$ = $1; } +| n611 "611" { assert ($1 == 611); $$ = $1; } +| n612 "612" { assert ($1 == 612); $$ = $1; } +| n613 "613" { assert ($1 == 613); $$ = $1; } +| n614 "614" { assert ($1 == 614); $$ = $1; } +| n615 "615" { assert ($1 == 615); $$ = $1; } +| n616 "616" { assert ($1 == 616); $$ = $1; } +| n617 "617" { assert ($1 == 617); $$ = $1; } +| n618 "618" { assert ($1 == 618); $$ = $1; } +| n619 "619" { assert ($1 == 619); $$ = $1; } +| n620 "620" { assert ($1 == 620); $$ = $1; } +| n621 "621" { assert ($1 == 621); $$ = $1; } +| n622 "622" { assert ($1 == 622); $$ = $1; } +| n623 "623" { assert ($1 == 623); $$ = $1; } +| n624 "624" { assert ($1 == 624); $$ = $1; } +| n625 "625" { assert ($1 == 625); $$ = $1; } +| n626 "626" { assert ($1 == 626); $$ = $1; } +| n627 "627" { assert ($1 == 627); $$ = $1; } +| n628 "628" { assert ($1 == 628); $$ = $1; } +| n629 "629" { assert ($1 == 629); $$ = $1; } +| n630 "630" { assert ($1 == 630); $$ = $1; } +| n631 "631" { assert ($1 == 631); $$ = $1; } +| n632 "632" { assert ($1 == 632); $$ = $1; } +| n633 "633" { assert ($1 == 633); $$ = $1; } +| n634 "634" { assert ($1 == 634); $$ = $1; } +| n635 "635" { assert ($1 == 635); $$ = $1; } +| n636 "636" { assert ($1 == 636); $$ = $1; } +| n637 "637" { assert ($1 == 637); $$ = $1; } +| n638 "638" { assert ($1 == 638); $$ = $1; } +| n639 "639" { assert ($1 == 639); $$ = $1; } +| n640 "640" { assert ($1 == 640); $$ = $1; } +| n641 "641" { assert ($1 == 641); $$ = $1; } +| n642 "642" { assert ($1 == 642); $$ = $1; } +| n643 "643" { assert ($1 == 643); $$ = $1; } +| n644 "644" { assert ($1 == 644); $$ = $1; } +| n645 "645" { assert ($1 == 645); $$ = $1; } +| n646 "646" { assert ($1 == 646); $$ = $1; } +| n647 "647" { assert ($1 == 647); $$ = $1; } +| n648 "648" { assert ($1 == 648); $$ = $1; } +| n649 "649" { assert ($1 == 649); $$ = $1; } +| n650 
"650" { assert ($1 == 650); $$ = $1; } +| n651 "651" { assert ($1 == 651); $$ = $1; } +| n652 "652" { assert ($1 == 652); $$ = $1; } +| n653 "653" { assert ($1 == 653); $$ = $1; } +| n654 "654" { assert ($1 == 654); $$ = $1; } +| n655 "655" { assert ($1 == 655); $$ = $1; } +| n656 "656" { assert ($1 == 656); $$ = $1; } +| n657 "657" { assert ($1 == 657); $$ = $1; } +| n658 "658" { assert ($1 == 658); $$ = $1; } +| n659 "659" { assert ($1 == 659); $$ = $1; } +| n660 "660" { assert ($1 == 660); $$ = $1; } +| n661 "661" { assert ($1 == 661); $$ = $1; } +| n662 "662" { assert ($1 == 662); $$ = $1; } +| n663 "663" { assert ($1 == 663); $$ = $1; } +| n664 "664" { assert ($1 == 664); $$ = $1; } +| n665 "665" { assert ($1 == 665); $$ = $1; } +| n666 "666" { assert ($1 == 666); $$ = $1; } +| n667 "667" { assert ($1 == 667); $$ = $1; } +| n668 "668" { assert ($1 == 668); $$ = $1; } +| n669 "669" { assert ($1 == 669); $$ = $1; } +| n670 "670" { assert ($1 == 670); $$ = $1; } +| n671 "671" { assert ($1 == 671); $$ = $1; } +| n672 "672" { assert ($1 == 672); $$ = $1; } +| n673 "673" { assert ($1 == 673); $$ = $1; } +| n674 "674" { assert ($1 == 674); $$ = $1; } +| n675 "675" { assert ($1 == 675); $$ = $1; } +| n676 "676" { assert ($1 == 676); $$ = $1; } +| n677 "677" { assert ($1 == 677); $$ = $1; } +| n678 "678" { assert ($1 == 678); $$ = $1; } +| n679 "679" { assert ($1 == 679); $$ = $1; } +| n680 "680" { assert ($1 == 680); $$ = $1; } +| n681 "681" { assert ($1 == 681); $$ = $1; } +| n682 "682" { assert ($1 == 682); $$ = $1; } +| n683 "683" { assert ($1 == 683); $$ = $1; } +| n684 "684" { assert ($1 == 684); $$ = $1; } +| n685 "685" { assert ($1 == 685); $$ = $1; } +| n686 "686" { assert ($1 == 686); $$ = $1; } +| n687 "687" { assert ($1 == 687); $$ = $1; } +| n688 "688" { assert ($1 == 688); $$ = $1; } +| n689 "689" { assert ($1 == 689); $$ = $1; } +| n690 "690" { assert ($1 == 690); $$ = $1; } +| n691 "691" { assert ($1 == 691); $$ = $1; } +| n692 "692" { assert ($1 == 692); $$ = $1; } +| n693 "693" { assert ($1 == 693); $$ = $1; } +| n694 "694" { assert ($1 == 694); $$ = $1; } +| n695 "695" { assert ($1 == 695); $$ = $1; } +| n696 "696" { assert ($1 == 696); $$ = $1; } +| n697 "697" { assert ($1 == 697); $$ = $1; } +| n698 "698" { assert ($1 == 698); $$ = $1; } +| n699 "699" { assert ($1 == 699); $$ = $1; } +| n700 "700" { assert ($1 == 700); $$ = $1; } +| n701 "701" { assert ($1 == 701); $$ = $1; } +| n702 "702" { assert ($1 == 702); $$ = $1; } +| n703 "703" { assert ($1 == 703); $$ = $1; } +| n704 "704" { assert ($1 == 704); $$ = $1; } +| n705 "705" { assert ($1 == 705); $$ = $1; } +| n706 "706" { assert ($1 == 706); $$ = $1; } +| n707 "707" { assert ($1 == 707); $$ = $1; } +| n708 "708" { assert ($1 == 708); $$ = $1; } +| n709 "709" { assert ($1 == 709); $$ = $1; } +| n710 "710" { assert ($1 == 710); $$ = $1; } +| n711 "711" { assert ($1 == 711); $$ = $1; } +| n712 "712" { assert ($1 == 712); $$ = $1; } +| n713 "713" { assert ($1 == 713); $$ = $1; } +| n714 "714" { assert ($1 == 714); $$ = $1; } +| n715 "715" { assert ($1 == 715); $$ = $1; } +| n716 "716" { assert ($1 == 716); $$ = $1; } +| n717 "717" { assert ($1 == 717); $$ = $1; } +| n718 "718" { assert ($1 == 718); $$ = $1; } +| n719 "719" { assert ($1 == 719); $$ = $1; } +| n720 "720" { assert ($1 == 720); $$ = $1; } +| n721 "721" { assert ($1 == 721); $$ = $1; } +| n722 "722" { assert ($1 == 722); $$ = $1; } +| n723 "723" { assert ($1 == 723); $$ = $1; } +| n724 "724" { assert ($1 == 724); $$ = $1; } +| n725 "725" { assert ($1 == 725); 
$$ = $1; } +| n726 "726" { assert ($1 == 726); $$ = $1; } +| n727 "727" { assert ($1 == 727); $$ = $1; } +| n728 "728" { assert ($1 == 728); $$ = $1; } +| n729 "729" { assert ($1 == 729); $$ = $1; } +| n730 "730" { assert ($1 == 730); $$ = $1; } +| n731 "731" { assert ($1 == 731); $$ = $1; } +| n732 "732" { assert ($1 == 732); $$ = $1; } +| n733 "733" { assert ($1 == 733); $$ = $1; } +| n734 "734" { assert ($1 == 734); $$ = $1; } +| n735 "735" { assert ($1 == 735); $$ = $1; } +| n736 "736" { assert ($1 == 736); $$ = $1; } +| n737 "737" { assert ($1 == 737); $$ = $1; } +| n738 "738" { assert ($1 == 738); $$ = $1; } +| n739 "739" { assert ($1 == 739); $$ = $1; } +| n740 "740" { assert ($1 == 740); $$ = $1; } +| n741 "741" { assert ($1 == 741); $$ = $1; } +| n742 "742" { assert ($1 == 742); $$ = $1; } +| n743 "743" { assert ($1 == 743); $$ = $1; } +| n744 "744" { assert ($1 == 744); $$ = $1; } +| n745 "745" { assert ($1 == 745); $$ = $1; } +| n746 "746" { assert ($1 == 746); $$ = $1; } +| n747 "747" { assert ($1 == 747); $$ = $1; } +| n748 "748" { assert ($1 == 748); $$ = $1; } +| n749 "749" { assert ($1 == 749); $$ = $1; } +| n750 "750" { assert ($1 == 750); $$ = $1; } +| n751 "751" { assert ($1 == 751); $$ = $1; } +| n752 "752" { assert ($1 == 752); $$ = $1; } +| n753 "753" { assert ($1 == 753); $$ = $1; } +| n754 "754" { assert ($1 == 754); $$ = $1; } +| n755 "755" { assert ($1 == 755); $$ = $1; } +| n756 "756" { assert ($1 == 756); $$ = $1; } +| n757 "757" { assert ($1 == 757); $$ = $1; } +| n758 "758" { assert ($1 == 758); $$ = $1; } +| n759 "759" { assert ($1 == 759); $$ = $1; } +| n760 "760" { assert ($1 == 760); $$ = $1; } +| n761 "761" { assert ($1 == 761); $$ = $1; } +| n762 "762" { assert ($1 == 762); $$ = $1; } +| n763 "763" { assert ($1 == 763); $$ = $1; } +| n764 "764" { assert ($1 == 764); $$ = $1; } +| n765 "765" { assert ($1 == 765); $$ = $1; } +| n766 "766" { assert ($1 == 766); $$ = $1; } +| n767 "767" { assert ($1 == 767); $$ = $1; } +| n768 "768" { assert ($1 == 768); $$ = $1; } +| n769 "769" { assert ($1 == 769); $$ = $1; } +| n770 "770" { assert ($1 == 770); $$ = $1; } +| n771 "771" { assert ($1 == 771); $$ = $1; } +| n772 "772" { assert ($1 == 772); $$ = $1; } +| n773 "773" { assert ($1 == 773); $$ = $1; } +| n774 "774" { assert ($1 == 774); $$ = $1; } +| n775 "775" { assert ($1 == 775); $$ = $1; } +| n776 "776" { assert ($1 == 776); $$ = $1; } +| n777 "777" { assert ($1 == 777); $$ = $1; } +| n778 "778" { assert ($1 == 778); $$ = $1; } +| n779 "779" { assert ($1 == 779); $$ = $1; } +| n780 "780" { assert ($1 == 780); $$ = $1; } +| n781 "781" { assert ($1 == 781); $$ = $1; } +| n782 "782" { assert ($1 == 782); $$ = $1; } +| n783 "783" { assert ($1 == 783); $$ = $1; } +| n784 "784" { assert ($1 == 784); $$ = $1; } +| n785 "785" { assert ($1 == 785); $$ = $1; } +| n786 "786" { assert ($1 == 786); $$ = $1; } +| n787 "787" { assert ($1 == 787); $$ = $1; } +| n788 "788" { assert ($1 == 788); $$ = $1; } +| n789 "789" { assert ($1 == 789); $$ = $1; } +| n790 "790" { assert ($1 == 790); $$ = $1; } +| n791 "791" { assert ($1 == 791); $$ = $1; } +| n792 "792" { assert ($1 == 792); $$ = $1; } +| n793 "793" { assert ($1 == 793); $$ = $1; } +| n794 "794" { assert ($1 == 794); $$ = $1; } +| n795 "795" { assert ($1 == 795); $$ = $1; } +| n796 "796" { assert ($1 == 796); $$ = $1; } +| n797 "797" { assert ($1 == 797); $$ = $1; } +| n798 "798" { assert ($1 == 798); $$ = $1; } +| n799 "799" { assert ($1 == 799); $$ = $1; } +| n800 "800" { assert ($1 == 800); $$ = $1; } +| n801 "801" { 
assert ($1 == 801); $$ = $1; } +| n802 "802" { assert ($1 == 802); $$ = $1; } +| n803 "803" { assert ($1 == 803); $$ = $1; } +| n804 "804" { assert ($1 == 804); $$ = $1; } +| n805 "805" { assert ($1 == 805); $$ = $1; } +| n806 "806" { assert ($1 == 806); $$ = $1; } +| n807 "807" { assert ($1 == 807); $$ = $1; } +| n808 "808" { assert ($1 == 808); $$ = $1; } +| n809 "809" { assert ($1 == 809); $$ = $1; } +| n810 "810" { assert ($1 == 810); $$ = $1; } +| n811 "811" { assert ($1 == 811); $$ = $1; } +| n812 "812" { assert ($1 == 812); $$ = $1; } +| n813 "813" { assert ($1 == 813); $$ = $1; } +| n814 "814" { assert ($1 == 814); $$ = $1; } +| n815 "815" { assert ($1 == 815); $$ = $1; } +| n816 "816" { assert ($1 == 816); $$ = $1; } +| n817 "817" { assert ($1 == 817); $$ = $1; } +| n818 "818" { assert ($1 == 818); $$ = $1; } +| n819 "819" { assert ($1 == 819); $$ = $1; } +| n820 "820" { assert ($1 == 820); $$ = $1; } +| n821 "821" { assert ($1 == 821); $$ = $1; } +| n822 "822" { assert ($1 == 822); $$ = $1; } +| n823 "823" { assert ($1 == 823); $$ = $1; } +| n824 "824" { assert ($1 == 824); $$ = $1; } +| n825 "825" { assert ($1 == 825); $$ = $1; } +| n826 "826" { assert ($1 == 826); $$ = $1; } +| n827 "827" { assert ($1 == 827); $$ = $1; } +| n828 "828" { assert ($1 == 828); $$ = $1; } +| n829 "829" { assert ($1 == 829); $$ = $1; } +| n830 "830" { assert ($1 == 830); $$ = $1; } +| n831 "831" { assert ($1 == 831); $$ = $1; } +| n832 "832" { assert ($1 == 832); $$ = $1; } +| n833 "833" { assert ($1 == 833); $$ = $1; } +| n834 "834" { assert ($1 == 834); $$ = $1; } +| n835 "835" { assert ($1 == 835); $$ = $1; } +| n836 "836" { assert ($1 == 836); $$ = $1; } +| n837 "837" { assert ($1 == 837); $$ = $1; } +| n838 "838" { assert ($1 == 838); $$ = $1; } +| n839 "839" { assert ($1 == 839); $$ = $1; } +| n840 "840" { assert ($1 == 840); $$ = $1; } +| n841 "841" { assert ($1 == 841); $$ = $1; } +| n842 "842" { assert ($1 == 842); $$ = $1; } +| n843 "843" { assert ($1 == 843); $$ = $1; } +| n844 "844" { assert ($1 == 844); $$ = $1; } +| n845 "845" { assert ($1 == 845); $$ = $1; } +| n846 "846" { assert ($1 == 846); $$ = $1; } +| n847 "847" { assert ($1 == 847); $$ = $1; } +| n848 "848" { assert ($1 == 848); $$ = $1; } +| n849 "849" { assert ($1 == 849); $$ = $1; } +| n850 "850" { assert ($1 == 850); $$ = $1; } +| n851 "851" { assert ($1 == 851); $$ = $1; } +| n852 "852" { assert ($1 == 852); $$ = $1; } +| n853 "853" { assert ($1 == 853); $$ = $1; } +| n854 "854" { assert ($1 == 854); $$ = $1; } +| n855 "855" { assert ($1 == 855); $$ = $1; } +| n856 "856" { assert ($1 == 856); $$ = $1; } +| n857 "857" { assert ($1 == 857); $$ = $1; } +| n858 "858" { assert ($1 == 858); $$ = $1; } +| n859 "859" { assert ($1 == 859); $$ = $1; } +| n860 "860" { assert ($1 == 860); $$ = $1; } +| n861 "861" { assert ($1 == 861); $$ = $1; } +| n862 "862" { assert ($1 == 862); $$ = $1; } +| n863 "863" { assert ($1 == 863); $$ = $1; } +| n864 "864" { assert ($1 == 864); $$ = $1; } +| n865 "865" { assert ($1 == 865); $$ = $1; } +| n866 "866" { assert ($1 == 866); $$ = $1; } +| n867 "867" { assert ($1 == 867); $$ = $1; } +| n868 "868" { assert ($1 == 868); $$ = $1; } +| n869 "869" { assert ($1 == 869); $$ = $1; } +| n870 "870" { assert ($1 == 870); $$ = $1; } +| n871 "871" { assert ($1 == 871); $$ = $1; } +| n872 "872" { assert ($1 == 872); $$ = $1; } +| n873 "873" { assert ($1 == 873); $$ = $1; } +| n874 "874" { assert ($1 == 874); $$ = $1; } +| n875 "875" { assert ($1 == 875); $$ = $1; } +| n876 "876" { assert ($1 == 876); $$ = $1; 
} +| n877 "877" { assert ($1 == 877); $$ = $1; } +| n878 "878" { assert ($1 == 878); $$ = $1; } +| n879 "879" { assert ($1 == 879); $$ = $1; } +| n880 "880" { assert ($1 == 880); $$ = $1; } +| n881 "881" { assert ($1 == 881); $$ = $1; } +| n882 "882" { assert ($1 == 882); $$ = $1; } +| n883 "883" { assert ($1 == 883); $$ = $1; } +| n884 "884" { assert ($1 == 884); $$ = $1; } +| n885 "885" { assert ($1 == 885); $$ = $1; } +| n886 "886" { assert ($1 == 886); $$ = $1; } +| n887 "887" { assert ($1 == 887); $$ = $1; } +| n888 "888" { assert ($1 == 888); $$ = $1; } +| n889 "889" { assert ($1 == 889); $$ = $1; } +| n890 "890" { assert ($1 == 890); $$ = $1; } +| n891 "891" { assert ($1 == 891); $$ = $1; } +| n892 "892" { assert ($1 == 892); $$ = $1; } +| n893 "893" { assert ($1 == 893); $$ = $1; } +| n894 "894" { assert ($1 == 894); $$ = $1; } +| n895 "895" { assert ($1 == 895); $$ = $1; } +| n896 "896" { assert ($1 == 896); $$ = $1; } +| n897 "897" { assert ($1 == 897); $$ = $1; } +| n898 "898" { assert ($1 == 898); $$ = $1; } +| n899 "899" { assert ($1 == 899); $$ = $1; } +| n900 "900" { assert ($1 == 900); $$ = $1; } +| n901 "901" { assert ($1 == 901); $$ = $1; } +| n902 "902" { assert ($1 == 902); $$ = $1; } +| n903 "903" { assert ($1 == 903); $$ = $1; } +| n904 "904" { assert ($1 == 904); $$ = $1; } +| n905 "905" { assert ($1 == 905); $$ = $1; } +| n906 "906" { assert ($1 == 906); $$ = $1; } +| n907 "907" { assert ($1 == 907); $$ = $1; } +| n908 "908" { assert ($1 == 908); $$ = $1; } +| n909 "909" { assert ($1 == 909); $$ = $1; } +| n910 "910" { assert ($1 == 910); $$ = $1; } +| n911 "911" { assert ($1 == 911); $$ = $1; } +| n912 "912" { assert ($1 == 912); $$ = $1; } +| n913 "913" { assert ($1 == 913); $$ = $1; } +| n914 "914" { assert ($1 == 914); $$ = $1; } +| n915 "915" { assert ($1 == 915); $$ = $1; } +| n916 "916" { assert ($1 == 916); $$ = $1; } +| n917 "917" { assert ($1 == 917); $$ = $1; } +| n918 "918" { assert ($1 == 918); $$ = $1; } +| n919 "919" { assert ($1 == 919); $$ = $1; } +| n920 "920" { assert ($1 == 920); $$ = $1; } +| n921 "921" { assert ($1 == 921); $$ = $1; } +| n922 "922" { assert ($1 == 922); $$ = $1; } +| n923 "923" { assert ($1 == 923); $$ = $1; } +| n924 "924" { assert ($1 == 924); $$ = $1; } +| n925 "925" { assert ($1 == 925); $$ = $1; } +| n926 "926" { assert ($1 == 926); $$ = $1; } +| n927 "927" { assert ($1 == 927); $$ = $1; } +| n928 "928" { assert ($1 == 928); $$ = $1; } +| n929 "929" { assert ($1 == 929); $$ = $1; } +| n930 "930" { assert ($1 == 930); $$ = $1; } +| n931 "931" { assert ($1 == 931); $$ = $1; } +| n932 "932" { assert ($1 == 932); $$ = $1; } +| n933 "933" { assert ($1 == 933); $$ = $1; } +| n934 "934" { assert ($1 == 934); $$ = $1; } +| n935 "935" { assert ($1 == 935); $$ = $1; } +| n936 "936" { assert ($1 == 936); $$ = $1; } +| n937 "937" { assert ($1 == 937); $$ = $1; } +| n938 "938" { assert ($1 == 938); $$ = $1; } +| n939 "939" { assert ($1 == 939); $$ = $1; } +| n940 "940" { assert ($1 == 940); $$ = $1; } +| n941 "941" { assert ($1 == 941); $$ = $1; } +| n942 "942" { assert ($1 == 942); $$ = $1; } +| n943 "943" { assert ($1 == 943); $$ = $1; } +| n944 "944" { assert ($1 == 944); $$ = $1; } +| n945 "945" { assert ($1 == 945); $$ = $1; } +| n946 "946" { assert ($1 == 946); $$ = $1; } +| n947 "947" { assert ($1 == 947); $$ = $1; } +| n948 "948" { assert ($1 == 948); $$ = $1; } +| n949 "949" { assert ($1 == 949); $$ = $1; } +| n950 "950" { assert ($1 == 950); $$ = $1; } +| n951 "951" { assert ($1 == 951); $$ = $1; } +| n952 "952" { assert ($1 
== 952); $$ = $1; } +| n953 "953" { assert ($1 == 953); $$ = $1; } +| n954 "954" { assert ($1 == 954); $$ = $1; } +| n955 "955" { assert ($1 == 955); $$ = $1; } +| n956 "956" { assert ($1 == 956); $$ = $1; } +| n957 "957" { assert ($1 == 957); $$ = $1; } +| n958 "958" { assert ($1 == 958); $$ = $1; } +| n959 "959" { assert ($1 == 959); $$ = $1; } +| n960 "960" { assert ($1 == 960); $$ = $1; } +| n961 "961" { assert ($1 == 961); $$ = $1; } +| n962 "962" { assert ($1 == 962); $$ = $1; } +| n963 "963" { assert ($1 == 963); $$ = $1; } +| n964 "964" { assert ($1 == 964); $$ = $1; } +| n965 "965" { assert ($1 == 965); $$ = $1; } +| n966 "966" { assert ($1 == 966); $$ = $1; } +| n967 "967" { assert ($1 == 967); $$ = $1; } +| n968 "968" { assert ($1 == 968); $$ = $1; } +| n969 "969" { assert ($1 == 969); $$ = $1; } +| n970 "970" { assert ($1 == 970); $$ = $1; } +| n971 "971" { assert ($1 == 971); $$ = $1; } +| n972 "972" { assert ($1 == 972); $$ = $1; } +| n973 "973" { assert ($1 == 973); $$ = $1; } +| n974 "974" { assert ($1 == 974); $$ = $1; } +| n975 "975" { assert ($1 == 975); $$ = $1; } +| n976 "976" { assert ($1 == 976); $$ = $1; } +| n977 "977" { assert ($1 == 977); $$ = $1; } +| n978 "978" { assert ($1 == 978); $$ = $1; } +| n979 "979" { assert ($1 == 979); $$ = $1; } +| n980 "980" { assert ($1 == 980); $$ = $1; } +| n981 "981" { assert ($1 == 981); $$ = $1; } +| n982 "982" { assert ($1 == 982); $$ = $1; } +| n983 "983" { assert ($1 == 983); $$ = $1; } +| n984 "984" { assert ($1 == 984); $$ = $1; } +| n985 "985" { assert ($1 == 985); $$ = $1; } +| n986 "986" { assert ($1 == 986); $$ = $1; } +| n987 "987" { assert ($1 == 987); $$ = $1; } +| n988 "988" { assert ($1 == 988); $$ = $1; } +| n989 "989" { assert ($1 == 989); $$ = $1; } +| n990 "990" { assert ($1 == 990); $$ = $1; } +| n991 "991" { assert ($1 == 991); $$ = $1; } +| n992 "992" { assert ($1 == 992); $$ = $1; } +| n993 "993" { assert ($1 == 993); $$ = $1; } +| n994 "994" { assert ($1 == 994); $$ = $1; } +| n995 "995" { assert ($1 == 995); $$ = $1; } +| n996 "996" { assert ($1 == 996); $$ = $1; } +| n997 "997" { assert ($1 == 997); $$ = $1; } +| n998 "998" { assert ($1 == 998); $$ = $1; } +| n999 "999" { assert ($1 == 999); $$ = $1; } +| n1000 "1000" { assert ($1 == 1000); $$ = $1; } +; +n1: token { $$ = 1; }; +n2: token { $$ = 2; }; +n3: token { $$ = 3; }; +n4: token { $$ = 4; }; +n5: token { $$ = 5; }; +n6: token { $$ = 6; }; +n7: token { $$ = 7; }; +n8: token { $$ = 8; }; +n9: token { $$ = 9; }; +n10: token { $$ = 10; }; +n11: token { $$ = 11; }; +n12: token { $$ = 12; }; +n13: token { $$ = 13; }; +n14: token { $$ = 14; }; +n15: token { $$ = 15; }; +n16: token { $$ = 16; }; +n17: token { $$ = 17; }; +n18: token { $$ = 18; }; +n19: token { $$ = 19; }; +n20: token { $$ = 20; }; +n21: token { $$ = 21; }; +n22: token { $$ = 22; }; +n23: token { $$ = 23; }; +n24: token { $$ = 24; }; +n25: token { $$ = 25; }; +n26: token { $$ = 26; }; +n27: token { $$ = 27; }; +n28: token { $$ = 28; }; +n29: token { $$ = 29; }; +n30: token { $$ = 30; }; +n31: token { $$ = 31; }; +n32: token { $$ = 32; }; +n33: token { $$ = 33; }; +n34: token { $$ = 34; }; +n35: token { $$ = 35; }; +n36: token { $$ = 36; }; +n37: token { $$ = 37; }; +n38: token { $$ = 38; }; +n39: token { $$ = 39; }; +n40: token { $$ = 40; }; +n41: token { $$ = 41; }; +n42: token { $$ = 42; }; +n43: token { $$ = 43; }; +n44: token { $$ = 44; }; +n45: token { $$ = 45; }; +n46: token { $$ = 46; }; +n47: token { $$ = 47; }; +n48: token { $$ = 48; }; +n49: token { $$ = 49; }; +n50: token { $$ 
= 50; }; +n51: token { $$ = 51; }; +n52: token { $$ = 52; }; +n53: token { $$ = 53; }; +n54: token { $$ = 54; }; +n55: token { $$ = 55; }; +n56: token { $$ = 56; }; +n57: token { $$ = 57; }; +n58: token { $$ = 58; }; +n59: token { $$ = 59; }; +n60: token { $$ = 60; }; +n61: token { $$ = 61; }; +n62: token { $$ = 62; }; +n63: token { $$ = 63; }; +n64: token { $$ = 64; }; +n65: token { $$ = 65; }; +n66: token { $$ = 66; }; +n67: token { $$ = 67; }; +n68: token { $$ = 68; }; +n69: token { $$ = 69; }; +n70: token { $$ = 70; }; +n71: token { $$ = 71; }; +n72: token { $$ = 72; }; +n73: token { $$ = 73; }; +n74: token { $$ = 74; }; +n75: token { $$ = 75; }; +n76: token { $$ = 76; }; +n77: token { $$ = 77; }; +n78: token { $$ = 78; }; +n79: token { $$ = 79; }; +n80: token { $$ = 80; }; +n81: token { $$ = 81; }; +n82: token { $$ = 82; }; +n83: token { $$ = 83; }; +n84: token { $$ = 84; }; +n85: token { $$ = 85; }; +n86: token { $$ = 86; }; +n87: token { $$ = 87; }; +n88: token { $$ = 88; }; +n89: token { $$ = 89; }; +n90: token { $$ = 90; }; +n91: token { $$ = 91; }; +n92: token { $$ = 92; }; +n93: token { $$ = 93; }; +n94: token { $$ = 94; }; +n95: token { $$ = 95; }; +n96: token { $$ = 96; }; +n97: token { $$ = 97; }; +n98: token { $$ = 98; }; +n99: token { $$ = 99; }; +n100: token { $$ = 100; }; +n101: token { $$ = 101; }; +n102: token { $$ = 102; }; +n103: token { $$ = 103; }; +n104: token { $$ = 104; }; +n105: token { $$ = 105; }; +n106: token { $$ = 106; }; +n107: token { $$ = 107; }; +n108: token { $$ = 108; }; +n109: token { $$ = 109; }; +n110: token { $$ = 110; }; +n111: token { $$ = 111; }; +n112: token { $$ = 112; }; +n113: token { $$ = 113; }; +n114: token { $$ = 114; }; +n115: token { $$ = 115; }; +n116: token { $$ = 116; }; +n117: token { $$ = 117; }; +n118: token { $$ = 118; }; +n119: token { $$ = 119; }; +n120: token { $$ = 120; }; +n121: token { $$ = 121; }; +n122: token { $$ = 122; }; +n123: token { $$ = 123; }; +n124: token { $$ = 124; }; +n125: token { $$ = 125; }; +n126: token { $$ = 126; }; +n127: token { $$ = 127; }; +n128: token { $$ = 128; }; +n129: token { $$ = 129; }; +n130: token { $$ = 130; }; +n131: token { $$ = 131; }; +n132: token { $$ = 132; }; +n133: token { $$ = 133; }; +n134: token { $$ = 134; }; +n135: token { $$ = 135; }; +n136: token { $$ = 136; }; +n137: token { $$ = 137; }; +n138: token { $$ = 138; }; +n139: token { $$ = 139; }; +n140: token { $$ = 140; }; +n141: token { $$ = 141; }; +n142: token { $$ = 142; }; +n143: token { $$ = 143; }; +n144: token { $$ = 144; }; +n145: token { $$ = 145; }; +n146: token { $$ = 146; }; +n147: token { $$ = 147; }; +n148: token { $$ = 148; }; +n149: token { $$ = 149; }; +n150: token { $$ = 150; }; +n151: token { $$ = 151; }; +n152: token { $$ = 152; }; +n153: token { $$ = 153; }; +n154: token { $$ = 154; }; +n155: token { $$ = 155; }; +n156: token { $$ = 156; }; +n157: token { $$ = 157; }; +n158: token { $$ = 158; }; +n159: token { $$ = 159; }; +n160: token { $$ = 160; }; +n161: token { $$ = 161; }; +n162: token { $$ = 162; }; +n163: token { $$ = 163; }; +n164: token { $$ = 164; }; +n165: token { $$ = 165; }; +n166: token { $$ = 166; }; +n167: token { $$ = 167; }; +n168: token { $$ = 168; }; +n169: token { $$ = 169; }; +n170: token { $$ = 170; }; +n171: token { $$ = 171; }; +n172: token { $$ = 172; }; +n173: token { $$ = 173; }; +n174: token { $$ = 174; }; +n175: token { $$ = 175; }; +n176: token { $$ = 176; }; +n177: token { $$ = 177; }; +n178: token { $$ = 178; }; +n179: token { $$ = 179; }; +n180: token { $$ = 180; }; 
+n181: token { $$ = 181; }; +n182: token { $$ = 182; }; +n183: token { $$ = 183; }; +n184: token { $$ = 184; }; +n185: token { $$ = 185; }; +n186: token { $$ = 186; }; +n187: token { $$ = 187; }; +n188: token { $$ = 188; }; +n189: token { $$ = 189; }; +n190: token { $$ = 190; }; +n191: token { $$ = 191; }; +n192: token { $$ = 192; }; +n193: token { $$ = 193; }; +n194: token { $$ = 194; }; +n195: token { $$ = 195; }; +n196: token { $$ = 196; }; +n197: token { $$ = 197; }; +n198: token { $$ = 198; }; +n199: token { $$ = 199; }; +n200: token { $$ = 200; }; +n201: token { $$ = 201; }; +n202: token { $$ = 202; }; +n203: token { $$ = 203; }; +n204: token { $$ = 204; }; +n205: token { $$ = 205; }; +n206: token { $$ = 206; }; +n207: token { $$ = 207; }; +n208: token { $$ = 208; }; +n209: token { $$ = 209; }; +n210: token { $$ = 210; }; +n211: token { $$ = 211; }; +n212: token { $$ = 212; }; +n213: token { $$ = 213; }; +n214: token { $$ = 214; }; +n215: token { $$ = 215; }; +n216: token { $$ = 216; }; +n217: token { $$ = 217; }; +n218: token { $$ = 218; }; +n219: token { $$ = 219; }; +n220: token { $$ = 220; }; +n221: token { $$ = 221; }; +n222: token { $$ = 222; }; +n223: token { $$ = 223; }; +n224: token { $$ = 224; }; +n225: token { $$ = 225; }; +n226: token { $$ = 226; }; +n227: token { $$ = 227; }; +n228: token { $$ = 228; }; +n229: token { $$ = 229; }; +n230: token { $$ = 230; }; +n231: token { $$ = 231; }; +n232: token { $$ = 232; }; +n233: token { $$ = 233; }; +n234: token { $$ = 234; }; +n235: token { $$ = 235; }; +n236: token { $$ = 236; }; +n237: token { $$ = 237; }; +n238: token { $$ = 238; }; +n239: token { $$ = 239; }; +n240: token { $$ = 240; }; +n241: token { $$ = 241; }; +n242: token { $$ = 242; }; +n243: token { $$ = 243; }; +n244: token { $$ = 244; }; +n245: token { $$ = 245; }; +n246: token { $$ = 246; }; +n247: token { $$ = 247; }; +n248: token { $$ = 248; }; +n249: token { $$ = 249; }; +n250: token { $$ = 250; }; +n251: token { $$ = 251; }; +n252: token { $$ = 252; }; +n253: token { $$ = 253; }; +n254: token { $$ = 254; }; +n255: token { $$ = 255; }; +n256: token { $$ = 256; }; +n257: token { $$ = 257; }; +n258: token { $$ = 258; }; +n259: token { $$ = 259; }; +n260: token { $$ = 260; }; +n261: token { $$ = 261; }; +n262: token { $$ = 262; }; +n263: token { $$ = 263; }; +n264: token { $$ = 264; }; +n265: token { $$ = 265; }; +n266: token { $$ = 266; }; +n267: token { $$ = 267; }; +n268: token { $$ = 268; }; +n269: token { $$ = 269; }; +n270: token { $$ = 270; }; +n271: token { $$ = 271; }; +n272: token { $$ = 272; }; +n273: token { $$ = 273; }; +n274: token { $$ = 274; }; +n275: token { $$ = 275; }; +n276: token { $$ = 276; }; +n277: token { $$ = 277; }; +n278: token { $$ = 278; }; +n279: token { $$ = 279; }; +n280: token { $$ = 280; }; +n281: token { $$ = 281; }; +n282: token { $$ = 282; }; +n283: token { $$ = 283; }; +n284: token { $$ = 284; }; +n285: token { $$ = 285; }; +n286: token { $$ = 286; }; +n287: token { $$ = 287; }; +n288: token { $$ = 288; }; +n289: token { $$ = 289; }; +n290: token { $$ = 290; }; +n291: token { $$ = 291; }; +n292: token { $$ = 292; }; +n293: token { $$ = 293; }; +n294: token { $$ = 294; }; +n295: token { $$ = 295; }; +n296: token { $$ = 296; }; +n297: token { $$ = 297; }; +n298: token { $$ = 298; }; +n299: token { $$ = 299; }; +n300: token { $$ = 300; }; +n301: token { $$ = 301; }; +n302: token { $$ = 302; }; +n303: token { $$ = 303; }; +n304: token { $$ = 304; }; +n305: token { $$ = 305; }; +n306: token { $$ = 306; }; +n307: token { $$ = 307; 
}; +n308: token { $$ = 308; }; +n309: token { $$ = 309; }; +n310: token { $$ = 310; }; +n311: token { $$ = 311; }; +n312: token { $$ = 312; }; +n313: token { $$ = 313; }; +n314: token { $$ = 314; }; +n315: token { $$ = 315; }; +n316: token { $$ = 316; }; +n317: token { $$ = 317; }; +n318: token { $$ = 318; }; +n319: token { $$ = 319; }; +n320: token { $$ = 320; }; +n321: token { $$ = 321; }; +n322: token { $$ = 322; }; +n323: token { $$ = 323; }; +n324: token { $$ = 324; }; +n325: token { $$ = 325; }; +n326: token { $$ = 326; }; +n327: token { $$ = 327; }; +n328: token { $$ = 328; }; +n329: token { $$ = 329; }; +n330: token { $$ = 330; }; +n331: token { $$ = 331; }; +n332: token { $$ = 332; }; +n333: token { $$ = 333; }; +n334: token { $$ = 334; }; +n335: token { $$ = 335; }; +n336: token { $$ = 336; }; +n337: token { $$ = 337; }; +n338: token { $$ = 338; }; +n339: token { $$ = 339; }; +n340: token { $$ = 340; }; +n341: token { $$ = 341; }; +n342: token { $$ = 342; }; +n343: token { $$ = 343; }; +n344: token { $$ = 344; }; +n345: token { $$ = 345; }; +n346: token { $$ = 346; }; +n347: token { $$ = 347; }; +n348: token { $$ = 348; }; +n349: token { $$ = 349; }; +n350: token { $$ = 350; }; +n351: token { $$ = 351; }; +n352: token { $$ = 352; }; +n353: token { $$ = 353; }; +n354: token { $$ = 354; }; +n355: token { $$ = 355; }; +n356: token { $$ = 356; }; +n357: token { $$ = 357; }; +n358: token { $$ = 358; }; +n359: token { $$ = 359; }; +n360: token { $$ = 360; }; +n361: token { $$ = 361; }; +n362: token { $$ = 362; }; +n363: token { $$ = 363; }; +n364: token { $$ = 364; }; +n365: token { $$ = 365; }; +n366: token { $$ = 366; }; +n367: token { $$ = 367; }; +n368: token { $$ = 368; }; +n369: token { $$ = 369; }; +n370: token { $$ = 370; }; +n371: token { $$ = 371; }; +n372: token { $$ = 372; }; +n373: token { $$ = 373; }; +n374: token { $$ = 374; }; +n375: token { $$ = 375; }; +n376: token { $$ = 376; }; +n377: token { $$ = 377; }; +n378: token { $$ = 378; }; +n379: token { $$ = 379; }; +n380: token { $$ = 380; }; +n381: token { $$ = 381; }; +n382: token { $$ = 382; }; +n383: token { $$ = 383; }; +n384: token { $$ = 384; }; +n385: token { $$ = 385; }; +n386: token { $$ = 386; }; +n387: token { $$ = 387; }; +n388: token { $$ = 388; }; +n389: token { $$ = 389; }; +n390: token { $$ = 390; }; +n391: token { $$ = 391; }; +n392: token { $$ = 392; }; +n393: token { $$ = 393; }; +n394: token { $$ = 394; }; +n395: token { $$ = 395; }; +n396: token { $$ = 396; }; +n397: token { $$ = 397; }; +n398: token { $$ = 398; }; +n399: token { $$ = 399; }; +n400: token { $$ = 400; }; +n401: token { $$ = 401; }; +n402: token { $$ = 402; }; +n403: token { $$ = 403; }; +n404: token { $$ = 404; }; +n405: token { $$ = 405; }; +n406: token { $$ = 406; }; +n407: token { $$ = 407; }; +n408: token { $$ = 408; }; +n409: token { $$ = 409; }; +n410: token { $$ = 410; }; +n411: token { $$ = 411; }; +n412: token { $$ = 412; }; +n413: token { $$ = 413; }; +n414: token { $$ = 414; }; +n415: token { $$ = 415; }; +n416: token { $$ = 416; }; +n417: token { $$ = 417; }; +n418: token { $$ = 418; }; +n419: token { $$ = 419; }; +n420: token { $$ = 420; }; +n421: token { $$ = 421; }; +n422: token { $$ = 422; }; +n423: token { $$ = 423; }; +n424: token { $$ = 424; }; +n425: token { $$ = 425; }; +n426: token { $$ = 426; }; +n427: token { $$ = 427; }; +n428: token { $$ = 428; }; +n429: token { $$ = 429; }; +n430: token { $$ = 430; }; +n431: token { $$ = 431; }; +n432: token { $$ = 432; }; +n433: token { $$ = 433; }; +n434: token { $$ = 
434; }; +n435: token { $$ = 435; }; +n436: token { $$ = 436; }; +n437: token { $$ = 437; }; +n438: token { $$ = 438; }; +n439: token { $$ = 439; }; +n440: token { $$ = 440; }; +n441: token { $$ = 441; }; +n442: token { $$ = 442; }; +n443: token { $$ = 443; }; +n444: token { $$ = 444; }; +n445: token { $$ = 445; }; +n446: token { $$ = 446; }; +n447: token { $$ = 447; }; +n448: token { $$ = 448; }; +n449: token { $$ = 449; }; +n450: token { $$ = 450; }; +n451: token { $$ = 451; }; +n452: token { $$ = 452; }; +n453: token { $$ = 453; }; +n454: token { $$ = 454; }; +n455: token { $$ = 455; }; +n456: token { $$ = 456; }; +n457: token { $$ = 457; }; +n458: token { $$ = 458; }; +n459: token { $$ = 459; }; +n460: token { $$ = 460; }; +n461: token { $$ = 461; }; +n462: token { $$ = 462; }; +n463: token { $$ = 463; }; +n464: token { $$ = 464; }; +n465: token { $$ = 465; }; +n466: token { $$ = 466; }; +n467: token { $$ = 467; }; +n468: token { $$ = 468; }; +n469: token { $$ = 469; }; +n470: token { $$ = 470; }; +n471: token { $$ = 471; }; +n472: token { $$ = 472; }; +n473: token { $$ = 473; }; +n474: token { $$ = 474; }; +n475: token { $$ = 475; }; +n476: token { $$ = 476; }; +n477: token { $$ = 477; }; +n478: token { $$ = 478; }; +n479: token { $$ = 479; }; +n480: token { $$ = 480; }; +n481: token { $$ = 481; }; +n482: token { $$ = 482; }; +n483: token { $$ = 483; }; +n484: token { $$ = 484; }; +n485: token { $$ = 485; }; +n486: token { $$ = 486; }; +n487: token { $$ = 487; }; +n488: token { $$ = 488; }; +n489: token { $$ = 489; }; +n490: token { $$ = 490; }; +n491: token { $$ = 491; }; +n492: token { $$ = 492; }; +n493: token { $$ = 493; }; +n494: token { $$ = 494; }; +n495: token { $$ = 495; }; +n496: token { $$ = 496; }; +n497: token { $$ = 497; }; +n498: token { $$ = 498; }; +n499: token { $$ = 499; }; +n500: token { $$ = 500; }; +n501: token { $$ = 501; }; +n502: token { $$ = 502; }; +n503: token { $$ = 503; }; +n504: token { $$ = 504; }; +n505: token { $$ = 505; }; +n506: token { $$ = 506; }; +n507: token { $$ = 507; }; +n508: token { $$ = 508; }; +n509: token { $$ = 509; }; +n510: token { $$ = 510; }; +n511: token { $$ = 511; }; +n512: token { $$ = 512; }; +n513: token { $$ = 513; }; +n514: token { $$ = 514; }; +n515: token { $$ = 515; }; +n516: token { $$ = 516; }; +n517: token { $$ = 517; }; +n518: token { $$ = 518; }; +n519: token { $$ = 519; }; +n520: token { $$ = 520; }; +n521: token { $$ = 521; }; +n522: token { $$ = 522; }; +n523: token { $$ = 523; }; +n524: token { $$ = 524; }; +n525: token { $$ = 525; }; +n526: token { $$ = 526; }; +n527: token { $$ = 527; }; +n528: token { $$ = 528; }; +n529: token { $$ = 529; }; +n530: token { $$ = 530; }; +n531: token { $$ = 531; }; +n532: token { $$ = 532; }; +n533: token { $$ = 533; }; +n534: token { $$ = 534; }; +n535: token { $$ = 535; }; +n536: token { $$ = 536; }; +n537: token { $$ = 537; }; +n538: token { $$ = 538; }; +n539: token { $$ = 539; }; +n540: token { $$ = 540; }; +n541: token { $$ = 541; }; +n542: token { $$ = 542; }; +n543: token { $$ = 543; }; +n544: token { $$ = 544; }; +n545: token { $$ = 545; }; +n546: token { $$ = 546; }; +n547: token { $$ = 547; }; +n548: token { $$ = 548; }; +n549: token { $$ = 549; }; +n550: token { $$ = 550; }; +n551: token { $$ = 551; }; +n552: token { $$ = 552; }; +n553: token { $$ = 553; }; +n554: token { $$ = 554; }; +n555: token { $$ = 555; }; +n556: token { $$ = 556; }; +n557: token { $$ = 557; }; +n558: token { $$ = 558; }; +n559: token { $$ = 559; }; +n560: token { $$ = 560; }; +n561: token { $$ 
= 561; }; +n562: token { $$ = 562; }; +n563: token { $$ = 563; }; +n564: token { $$ = 564; }; +n565: token { $$ = 565; }; +n566: token { $$ = 566; }; +n567: token { $$ = 567; }; +n568: token { $$ = 568; }; +n569: token { $$ = 569; }; +n570: token { $$ = 570; }; +n571: token { $$ = 571; }; +n572: token { $$ = 572; }; +n573: token { $$ = 573; }; +n574: token { $$ = 574; }; +n575: token { $$ = 575; }; +n576: token { $$ = 576; }; +n577: token { $$ = 577; }; +n578: token { $$ = 578; }; +n579: token { $$ = 579; }; +n580: token { $$ = 580; }; +n581: token { $$ = 581; }; +n582: token { $$ = 582; }; +n583: token { $$ = 583; }; +n584: token { $$ = 584; }; +n585: token { $$ = 585; }; +n586: token { $$ = 586; }; +n587: token { $$ = 587; }; +n588: token { $$ = 588; }; +n589: token { $$ = 589; }; +n590: token { $$ = 590; }; +n591: token { $$ = 591; }; +n592: token { $$ = 592; }; +n593: token { $$ = 593; }; +n594: token { $$ = 594; }; +n595: token { $$ = 595; }; +n596: token { $$ = 596; }; +n597: token { $$ = 597; }; +n598: token { $$ = 598; }; +n599: token { $$ = 599; }; +n600: token { $$ = 600; }; +n601: token { $$ = 601; }; +n602: token { $$ = 602; }; +n603: token { $$ = 603; }; +n604: token { $$ = 604; }; +n605: token { $$ = 605; }; +n606: token { $$ = 606; }; +n607: token { $$ = 607; }; +n608: token { $$ = 608; }; +n609: token { $$ = 609; }; +n610: token { $$ = 610; }; +n611: token { $$ = 611; }; +n612: token { $$ = 612; }; +n613: token { $$ = 613; }; +n614: token { $$ = 614; }; +n615: token { $$ = 615; }; +n616: token { $$ = 616; }; +n617: token { $$ = 617; }; +n618: token { $$ = 618; }; +n619: token { $$ = 619; }; +n620: token { $$ = 620; }; +n621: token { $$ = 621; }; +n622: token { $$ = 622; }; +n623: token { $$ = 623; }; +n624: token { $$ = 624; }; +n625: token { $$ = 625; }; +n626: token { $$ = 626; }; +n627: token { $$ = 627; }; +n628: token { $$ = 628; }; +n629: token { $$ = 629; }; +n630: token { $$ = 630; }; +n631: token { $$ = 631; }; +n632: token { $$ = 632; }; +n633: token { $$ = 633; }; +n634: token { $$ = 634; }; +n635: token { $$ = 635; }; +n636: token { $$ = 636; }; +n637: token { $$ = 637; }; +n638: token { $$ = 638; }; +n639: token { $$ = 639; }; +n640: token { $$ = 640; }; +n641: token { $$ = 641; }; +n642: token { $$ = 642; }; +n643: token { $$ = 643; }; +n644: token { $$ = 644; }; +n645: token { $$ = 645; }; +n646: token { $$ = 646; }; +n647: token { $$ = 647; }; +n648: token { $$ = 648; }; +n649: token { $$ = 649; }; +n650: token { $$ = 650; }; +n651: token { $$ = 651; }; +n652: token { $$ = 652; }; +n653: token { $$ = 653; }; +n654: token { $$ = 654; }; +n655: token { $$ = 655; }; +n656: token { $$ = 656; }; +n657: token { $$ = 657; }; +n658: token { $$ = 658; }; +n659: token { $$ = 659; }; +n660: token { $$ = 660; }; +n661: token { $$ = 661; }; +n662: token { $$ = 662; }; +n663: token { $$ = 663; }; +n664: token { $$ = 664; }; +n665: token { $$ = 665; }; +n666: token { $$ = 666; }; +n667: token { $$ = 667; }; +n668: token { $$ = 668; }; +n669: token { $$ = 669; }; +n670: token { $$ = 670; }; +n671: token { $$ = 671; }; +n672: token { $$ = 672; }; +n673: token { $$ = 673; }; +n674: token { $$ = 674; }; +n675: token { $$ = 675; }; +n676: token { $$ = 676; }; +n677: token { $$ = 677; }; +n678: token { $$ = 678; }; +n679: token { $$ = 679; }; +n680: token { $$ = 680; }; +n681: token { $$ = 681; }; +n682: token { $$ = 682; }; +n683: token { $$ = 683; }; +n684: token { $$ = 684; }; +n685: token { $$ = 685; }; +n686: token { $$ = 686; }; +n687: token { $$ = 687; }; +n688: token { 
$$ = 688; }; +n689: token { $$ = 689; }; +n690: token { $$ = 690; }; +n691: token { $$ = 691; }; +n692: token { $$ = 692; }; +n693: token { $$ = 693; }; +n694: token { $$ = 694; }; +n695: token { $$ = 695; }; +n696: token { $$ = 696; }; +n697: token { $$ = 697; }; +n698: token { $$ = 698; }; +n699: token { $$ = 699; }; +n700: token { $$ = 700; }; +n701: token { $$ = 701; }; +n702: token { $$ = 702; }; +n703: token { $$ = 703; }; +n704: token { $$ = 704; }; +n705: token { $$ = 705; }; +n706: token { $$ = 706; }; +n707: token { $$ = 707; }; +n708: token { $$ = 708; }; +n709: token { $$ = 709; }; +n710: token { $$ = 710; }; +n711: token { $$ = 711; }; +n712: token { $$ = 712; }; +n713: token { $$ = 713; }; +n714: token { $$ = 714; }; +n715: token { $$ = 715; }; +n716: token { $$ = 716; }; +n717: token { $$ = 717; }; +n718: token { $$ = 718; }; +n719: token { $$ = 719; }; +n720: token { $$ = 720; }; +n721: token { $$ = 721; }; +n722: token { $$ = 722; }; +n723: token { $$ = 723; }; +n724: token { $$ = 724; }; +n725: token { $$ = 725; }; +n726: token { $$ = 726; }; +n727: token { $$ = 727; }; +n728: token { $$ = 728; }; +n729: token { $$ = 729; }; +n730: token { $$ = 730; }; +n731: token { $$ = 731; }; +n732: token { $$ = 732; }; +n733: token { $$ = 733; }; +n734: token { $$ = 734; }; +n735: token { $$ = 735; }; +n736: token { $$ = 736; }; +n737: token { $$ = 737; }; +n738: token { $$ = 738; }; +n739: token { $$ = 739; }; +n740: token { $$ = 740; }; +n741: token { $$ = 741; }; +n742: token { $$ = 742; }; +n743: token { $$ = 743; }; +n744: token { $$ = 744; }; +n745: token { $$ = 745; }; +n746: token { $$ = 746; }; +n747: token { $$ = 747; }; +n748: token { $$ = 748; }; +n749: token { $$ = 749; }; +n750: token { $$ = 750; }; +n751: token { $$ = 751; }; +n752: token { $$ = 752; }; +n753: token { $$ = 753; }; +n754: token { $$ = 754; }; +n755: token { $$ = 755; }; +n756: token { $$ = 756; }; +n757: token { $$ = 757; }; +n758: token { $$ = 758; }; +n759: token { $$ = 759; }; +n760: token { $$ = 760; }; +n761: token { $$ = 761; }; +n762: token { $$ = 762; }; +n763: token { $$ = 763; }; +n764: token { $$ = 764; }; +n765: token { $$ = 765; }; +n766: token { $$ = 766; }; +n767: token { $$ = 767; }; +n768: token { $$ = 768; }; +n769: token { $$ = 769; }; +n770: token { $$ = 770; }; +n771: token { $$ = 771; }; +n772: token { $$ = 772; }; +n773: token { $$ = 773; }; +n774: token { $$ = 774; }; +n775: token { $$ = 775; }; +n776: token { $$ = 776; }; +n777: token { $$ = 777; }; +n778: token { $$ = 778; }; +n779: token { $$ = 779; }; +n780: token { $$ = 780; }; +n781: token { $$ = 781; }; +n782: token { $$ = 782; }; +n783: token { $$ = 783; }; +n784: token { $$ = 784; }; +n785: token { $$ = 785; }; +n786: token { $$ = 786; }; +n787: token { $$ = 787; }; +n788: token { $$ = 788; }; +n789: token { $$ = 789; }; +n790: token { $$ = 790; }; +n791: token { $$ = 791; }; +n792: token { $$ = 792; }; +n793: token { $$ = 793; }; +n794: token { $$ = 794; }; +n795: token { $$ = 795; }; +n796: token { $$ = 796; }; +n797: token { $$ = 797; }; +n798: token { $$ = 798; }; +n799: token { $$ = 799; }; +n800: token { $$ = 800; }; +n801: token { $$ = 801; }; +n802: token { $$ = 802; }; +n803: token { $$ = 803; }; +n804: token { $$ = 804; }; +n805: token { $$ = 805; }; +n806: token { $$ = 806; }; +n807: token { $$ = 807; }; +n808: token { $$ = 808; }; +n809: token { $$ = 809; }; +n810: token { $$ = 810; }; +n811: token { $$ = 811; }; +n812: token { $$ = 812; }; +n813: token { $$ = 813; }; +n814: token { $$ = 814; }; +n815: token 
{ $$ = 815; }; +n816: token { $$ = 816; }; +n817: token { $$ = 817; }; +n818: token { $$ = 818; }; +n819: token { $$ = 819; }; +n820: token { $$ = 820; }; +n821: token { $$ = 821; }; +n822: token { $$ = 822; }; +n823: token { $$ = 823; }; +n824: token { $$ = 824; }; +n825: token { $$ = 825; }; +n826: token { $$ = 826; }; +n827: token { $$ = 827; }; +n828: token { $$ = 828; }; +n829: token { $$ = 829; }; +n830: token { $$ = 830; }; +n831: token { $$ = 831; }; +n832: token { $$ = 832; }; +n833: token { $$ = 833; }; +n834: token { $$ = 834; }; +n835: token { $$ = 835; }; +n836: token { $$ = 836; }; +n837: token { $$ = 837; }; +n838: token { $$ = 838; }; +n839: token { $$ = 839; }; +n840: token { $$ = 840; }; +n841: token { $$ = 841; }; +n842: token { $$ = 842; }; +n843: token { $$ = 843; }; +n844: token { $$ = 844; }; +n845: token { $$ = 845; }; +n846: token { $$ = 846; }; +n847: token { $$ = 847; }; +n848: token { $$ = 848; }; +n849: token { $$ = 849; }; +n850: token { $$ = 850; }; +n851: token { $$ = 851; }; +n852: token { $$ = 852; }; +n853: token { $$ = 853; }; +n854: token { $$ = 854; }; +n855: token { $$ = 855; }; +n856: token { $$ = 856; }; +n857: token { $$ = 857; }; +n858: token { $$ = 858; }; +n859: token { $$ = 859; }; +n860: token { $$ = 860; }; +n861: token { $$ = 861; }; +n862: token { $$ = 862; }; +n863: token { $$ = 863; }; +n864: token { $$ = 864; }; +n865: token { $$ = 865; }; +n866: token { $$ = 866; }; +n867: token { $$ = 867; }; +n868: token { $$ = 868; }; +n869: token { $$ = 869; }; +n870: token { $$ = 870; }; +n871: token { $$ = 871; }; +n872: token { $$ = 872; }; +n873: token { $$ = 873; }; +n874: token { $$ = 874; }; +n875: token { $$ = 875; }; +n876: token { $$ = 876; }; +n877: token { $$ = 877; }; +n878: token { $$ = 878; }; +n879: token { $$ = 879; }; +n880: token { $$ = 880; }; +n881: token { $$ = 881; }; +n882: token { $$ = 882; }; +n883: token { $$ = 883; }; +n884: token { $$ = 884; }; +n885: token { $$ = 885; }; +n886: token { $$ = 886; }; +n887: token { $$ = 887; }; +n888: token { $$ = 888; }; +n889: token { $$ = 889; }; +n890: token { $$ = 890; }; +n891: token { $$ = 891; }; +n892: token { $$ = 892; }; +n893: token { $$ = 893; }; +n894: token { $$ = 894; }; +n895: token { $$ = 895; }; +n896: token { $$ = 896; }; +n897: token { $$ = 897; }; +n898: token { $$ = 898; }; +n899: token { $$ = 899; }; +n900: token { $$ = 900; }; +n901: token { $$ = 901; }; +n902: token { $$ = 902; }; +n903: token { $$ = 903; }; +n904: token { $$ = 904; }; +n905: token { $$ = 905; }; +n906: token { $$ = 906; }; +n907: token { $$ = 907; }; +n908: token { $$ = 908; }; +n909: token { $$ = 909; }; +n910: token { $$ = 910; }; +n911: token { $$ = 911; }; +n912: token { $$ = 912; }; +n913: token { $$ = 913; }; +n914: token { $$ = 914; }; +n915: token { $$ = 915; }; +n916: token { $$ = 916; }; +n917: token { $$ = 917; }; +n918: token { $$ = 918; }; +n919: token { $$ = 919; }; +n920: token { $$ = 920; }; +n921: token { $$ = 921; }; +n922: token { $$ = 922; }; +n923: token { $$ = 923; }; +n924: token { $$ = 924; }; +n925: token { $$ = 925; }; +n926: token { $$ = 926; }; +n927: token { $$ = 927; }; +n928: token { $$ = 928; }; +n929: token { $$ = 929; }; +n930: token { $$ = 930; }; +n931: token { $$ = 931; }; +n932: token { $$ = 932; }; +n933: token { $$ = 933; }; +n934: token { $$ = 934; }; +n935: token { $$ = 935; }; +n936: token { $$ = 936; }; +n937: token { $$ = 937; }; +n938: token { $$ = 938; }; +n939: token { $$ = 939; }; +n940: token { $$ = 940; }; +n941: token { $$ = 941; }; +n942: 
token { $$ = 942; }; +n943: token { $$ = 943; }; +n944: token { $$ = 944; }; +n945: token { $$ = 945; }; +n946: token { $$ = 946; }; +n947: token { $$ = 947; }; +n948: token { $$ = 948; }; +n949: token { $$ = 949; }; +n950: token { $$ = 950; }; +n951: token { $$ = 951; }; +n952: token { $$ = 952; }; +n953: token { $$ = 953; }; +n954: token { $$ = 954; }; +n955: token { $$ = 955; }; +n956: token { $$ = 956; }; +n957: token { $$ = 957; }; +n958: token { $$ = 958; }; +n959: token { $$ = 959; }; +n960: token { $$ = 960; }; +n961: token { $$ = 961; }; +n962: token { $$ = 962; }; +n963: token { $$ = 963; }; +n964: token { $$ = 964; }; +n965: token { $$ = 965; }; +n966: token { $$ = 966; }; +n967: token { $$ = 967; }; +n968: token { $$ = 968; }; +n969: token { $$ = 969; }; +n970: token { $$ = 970; }; +n971: token { $$ = 971; }; +n972: token { $$ = 972; }; +n973: token { $$ = 973; }; +n974: token { $$ = 974; }; +n975: token { $$ = 975; }; +n976: token { $$ = 976; }; +n977: token { $$ = 977; }; +n978: token { $$ = 978; }; +n979: token { $$ = 979; }; +n980: token { $$ = 980; }; +n981: token { $$ = 981; }; +n982: token { $$ = 982; }; +n983: token { $$ = 983; }; +n984: token { $$ = 984; }; +n985: token { $$ = 985; }; +n986: token { $$ = 986; }; +n987: token { $$ = 987; }; +n988: token { $$ = 988; }; +n989: token { $$ = 989; }; +n990: token { $$ = 990; }; +n991: token { $$ = 991; }; +n992: token { $$ = 992; }; +n993: token { $$ = 993; }; +n994: token { $$ = 994; }; +n995: token { $$ = 995; }; +n996: token { $$ = 996; }; +n997: token { $$ = 997; }; +n998: token { $$ = 998; }; +n999: token { $$ = 999; }; +n1000: token { $$ = 1000; }; +%% + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int return_token = 1; + static int counter = 1; + if (counter > MAX) + { + assert (counter++ == MAX + 1); + return 0; + } + if (return_token) + { + return_token = 0; + return token; + } + return_token = 1; + return counter++; +} + +#include <stdlib.h> /* getenv. */ +#include <string.h> /* strcmp. */ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 +./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y + | (#) + (#) = 2222 ./calc.at:1494: $PREPARSER ./calc input -./calc.at:1489: cat stderr +./calc.at:1492: cat stderr stderr: -642. regression.at:1221: testing Expecting two tokens %glr-parser ...
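The generated torture grammar that ends above is driven by a counting lexer: yylex alternates between the fixed `token` terminal and an incrementing counter, so the parser sees the pairs (token, 1), (token, 2), ... up to MAX and each rule nN asserts that its semantic value is N. As a rough standalone illustration of that alternation pattern (not part of the build log; MAX, TOKEN and the printing here are made-up stand-ins for the generated parser's `token` and yyparse):

#include <assert.h>
#include <stdio.h>

/* Hypothetical stand-ins for the generated test's MAX and `token'. */
#define MAX 10
#define TOKEN 1000

/* Mimics the counting yylex shown above: yields TOKEN, then the next
   counter value, then TOKEN again, ..., and finally 0 (end of input). */
static int
next_symbol (void)
{
  static int return_token = 1;
  static int counter = 1;
  if (counter > MAX)
    {
      assert (counter++ == MAX + 1);
      return 0;
    }
  if (return_token)
    {
      return_token = 0;
      return TOKEN;
    }
  return_token = 1;
  return counter++;
}

int
main (void)
{
  int sym;
  /* Prints TOKEN 1 TOKEN 2 ... TOKEN MAX, one symbol per line. */
  while ((sym = next_symbol ()))
    printf ("%d\n", sym);
  return 0;
}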
-./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +input: Starting parse Entering state 0 Reading a token @@ -231042,103 +230887,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token 
"number" (1.17: 1) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -231151,18 +230977,10 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -641. regression.at:1220: testing Expecting two tokens ... -./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -input: -./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS - | (!!) + (1 2) = 1 + | (1 + 1) / (1 - 1) +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input -./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: Starting parse Entering state 0 @@ -231171,715 +230989,101 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token '=' () +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1494: cat stderr -./existing.at:808: grep '^State.*conflicts:' input.output -input: -./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS - | (* *) + (*) + (*) -./calc.at:1494: $PREPARSER ./calc input -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -643. regression.at:1222: testing Expecting two tokens lalr1.cc ... 
-./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y -stderr: -./calc.at:1489: cat stderr -./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS -input: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none - | (- *) + (1 2) = 1 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 
= token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token Next token is token ')' (1.17: ) -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -231893,832 +231097,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: -./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS - | 1 + 2 * 3 + !+ ++ -./calc.at:1494: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | 1 + 2 * 3 + !- ++ -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1489: cat stderr -./existing.at:808: grep '^State.*conflicts:' input.output -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1494: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - | (#) + (#) = 2222 -./calc.at:1494: $PREPARSER ./calc input -input: -./regression.at:917: cat tables.c - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -232816,226 +231195,110 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -637. 
regression.at:812: stderr: - ok -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp 
(1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -233058,8 +231321,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1489: "$PERL" -pi -e 'use strict; +614. torture.at:485: testing Exploding the Stack Size with Alloca ... +./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -233070,14 +231334,10 @@ }eg ' expout || exit 77 ./calc.at:1494: cat stderr -./calc.at:1489: cat stderr -input: +./calc.at:1492: cat stderr input: | (1 + #) = 1111 ./calc.at:1494: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1489: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 @@ -233156,208 +231416,12 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> 
$$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +567. calc.at:1492: ok ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -233432,16 +231496,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -233452,97 +231507,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr +615. torture.at:531: testing Exploding the Stack Size with Malloc ... +./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./calc.at:1494: cat stderr input: -input: - | (1 + #) = 1111 -./calc.at:1489: $PREPARSER ./calc input | (# + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -233609,85 +231583,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () stderr: Starting parse Entering state 0 @@ -233758,6 +231654,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -233768,23 +231665,2061 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -644. regression.at:1230: testing Braced code in declaration in rules section ... -./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: ./calc.at:1494: cat stderr +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. 
*/ +#include +} + +%define parse.error verbose +%debug +%{ +#include +#include +#include +#define MAX 200 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} +%union +{ + int val; +}; + +%token END "end" +%type exp input +%token t1 1 "1" +%token t2 2 "2" +%token t3 3 "3" +%token t4 4 "4" +%token t5 5 "5" +%token t6 6 "6" +%token t7 7 "7" +%token t8 8 "8" +%token t9 9 "9" +%token t10 10 "10" +%token t11 11 "11" +%token t12 12 "12" +%token t13 13 "13" +%token t14 14 "14" +%token t15 15 "15" +%token t16 16 "16" +%token t17 17 "17" +%token t18 18 "18" +%token t19 19 "19" +%token t20 20 "20" +%token t21 21 "21" +%token t22 22 "22" +%token t23 23 "23" +%token t24 24 "24" +%token t25 25 "25" +%token t26 26 "26" +%token t27 27 "27" +%token t28 28 "28" +%token t29 29 "29" +%token t30 30 "30" +%token t31 31 "31" +%token t32 32 "32" +%token t33 33 "33" +%token t34 34 "34" +%token t35 35 "35" +%token t36 36 "36" +%token t37 37 "37" +%token t38 38 "38" +%token t39 39 "39" +%token t40 40 "40" +%token t41 41 "41" +%token t42 42 "42" +%token t43 43 "43" +%token t44 44 "44" +%token t45 45 "45" +%token t46 46 "46" +%token t47 47 "47" +%token t48 48 "48" +%token t49 49 "49" +%token t50 50 "50" +%token t51 51 "51" +%token t52 52 "52" +%token t53 53 "53" +%token t54 54 "54" +%token t55 55 "55" +%token t56 56 "56" +%token t57 57 "57" +%token t58 58 "58" +%token t59 59 "59" +%token t60 60 "60" +%token t61 61 "61" +%token t62 62 "62" +%token t63 63 "63" +%token t64 64 "64" +%token t65 65 "65" +%token t66 66 "66" +%token t67 67 "67" +%token t68 68 "68" +%token t69 69 "69" +%token t70 70 "70" +%token t71 71 "71" +%token t72 72 "72" +%token t73 73 "73" +%token t74 74 "74" +%token t75 75 "75" +%token t76 76 "76" +%token t77 77 "77" +%token t78 78 "78" +%token t79 79 "79" +%token t80 80 "80" +%token t81 81 "81" +%token t82 82 "82" +%token t83 83 "83" +%token t84 84 "84" +%token t85 85 "85" +%token t86 86 "86" +%token t87 87 "87" +%token t88 88 "88" +%token t89 89 "89" +%token t90 90 "90" +%token t91 91 "91" +%token t92 92 "92" +%token t93 93 "93" +%token t94 94 "94" +%token t95 95 "95" +%token t96 96 "96" +%token t97 97 "97" +%token t98 98 "98" +%token t99 99 "99" +%token t100 100 "100" +%token t101 101 "101" +%token t102 102 "102" +%token t103 103 "103" +%token t104 104 "104" +%token t105 105 "105" +%token t106 106 "106" +%token t107 107 "107" +%token t108 108 "108" +%token t109 109 "109" +%token t110 110 "110" +%token t111 111 "111" +%token t112 112 "112" +%token t113 113 "113" +%token t114 114 "114" +%token t115 115 "115" +%token t116 116 "116" +%token t117 117 "117" +%token t118 118 "118" +%token t119 119 "119" +%token t120 120 "120" +%token t121 121 "121" +%token t122 122 "122" +%token t123 123 "123" +%token t124 124 "124" +%token t125 125 "125" +%token t126 126 "126" +%token t127 127 "127" +%token t128 128 "128" +%token t129 129 "129" +%token t130 130 "130" +%token t131 131 "131" +%token t132 132 "132" +%token t133 133 "133" +%token t134 134 "134" +%token t135 135 "135" +%token t136 136 "136" +%token t137 137 "137" +%token t138 138 "138" +%token t139 139 "139" +%token t140 140 "140" +%token t141 141 "141" +%token t142 142 "142" +%token t143 143 "143" +%token t144 144 "144" +%token t145 145 "145" +%token t146 146 "146" +%token t147 147 "147" +%token t148 148 "148" +%token t149 149 "149" +%token t150 150 "150" +%token t151 151 "151" +%token t152 152 "152" +%token t153 153 "153" +%token t154 154 "154" +%token t155 155 "155" +%token t156 156 "156" +%token t157 157 "157" 
+%token t158 158 "158" +%token t159 159 "159" +%token t160 160 "160" +%token t161 161 "161" +%token t162 162 "162" +%token t163 163 "163" +%token t164 164 "164" +%token t165 165 "165" +%token t166 166 "166" +%token t167 167 "167" +%token t168 168 "168" +%token t169 169 "169" +%token t170 170 "170" +%token t171 171 "171" +%token t172 172 "172" +%token t173 173 "173" +%token t174 174 "174" +%token t175 175 "175" +%token t176 176 "176" +%token t177 177 "177" +%token t178 178 "178" +%token t179 179 "179" +%token t180 180 "180" +%token t181 181 "181" +%token t182 182 "182" +%token t183 183 "183" +%token t184 184 "184" +%token t185 185 "185" +%token t186 186 "186" +%token t187 187 "187" +%token t188 188 "188" +%token t189 189 "189" +%token t190 190 "190" +%token t191 191 "191" +%token t192 192 "192" +%token t193 193 "193" +%token t194 194 "194" +%token t195 195 "195" +%token t196 196 "196" +%token t197 197 "197" +%token t198 198 "198" +%token t199 199 "199" +%token t200 200 "200" +%% +input: + exp { assert ($1 == 0); $$ = $1; } +| input exp { assert ($2 == $1 + 1); $$ = $2; } +; + +exp: + END + { $$ = 0; } +| "1" END + { $$ = 1; } +| "1" "2" END + { $$ = 2; } +| "1" "2" "3" END + { $$ = 3; } +| "1" "2" "3" "4" END + { $$ = 4; } +| "1" "2" "3" "4" "5" END + { $$ = 5; } +| "1" "2" "3" "4" "5" "6" END + { $$ = 6; } +| "1" "2" "3" "4" "5" "6" "7" END + { $$ = 7; } +| "1" "2" "3" "4" "5" "6" "7" "8" END + { $$ = 8; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" END + { $$ = 9; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END + { $$ = 10; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END + { $$ = 11; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END + { $$ = 12; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END + { $$ = 13; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END + { $$ = 14; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END + { $$ = 15; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + END + { $$ = 16; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" END + { $$ = 17; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" END + { $$ = 18; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" END + { $$ = 19; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" END + { $$ = 20; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" END + { $$ = 21; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" END + { $$ = 22; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" END + { $$ = 23; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" END + { $$ = 24; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" END + { $$ = 25; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END + { $$ = 26; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END + { $$ = 27; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END + { 
$$ = 28; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END + { $$ = 29; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + END + { $$ = 30; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" END + { $$ = 31; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" END + { $$ = 32; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" END + { $$ = 33; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" END + { $$ = 34; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" END + { $$ = 35; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" END + { $$ = 36; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" END + { $$ = 37; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" END + { $$ = 38; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" END + { $$ = 39; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END + { $$ = 40; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END + { $$ = 41; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END + { $$ = 42; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END + { $$ = 43; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + END + { $$ = 44; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" END + { $$ = 45; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" END + { $$ = 46; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" END + { $$ = 47; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" END + { $$ = 48; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" END + { $$ = 49; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" END + { $$ = 50; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" END + { $$ = 51; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" END + { $$ = 52; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" END + { $$ = 53; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END + { $$ = 54; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END + { $$ = 55; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END + { $$ = 56; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END + { $$ = 57; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + END + { $$ = 58; } +| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" END + { $$ = 59; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" END + { $$ = 60; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" END + { $$ = 61; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" END + { $$ = 62; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" END + { $$ = 63; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" END + { $$ = 64; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" END + { $$ = 65; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" END + { $$ = 66; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" END + { $$ = 67; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END + { $$ = 68; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END + { $$ = 69; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END + { $$ = 70; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END + { $$ = 71; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + END + { $$ = 72; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" END + { $$ = 73; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" END + { $$ = 74; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" END + { $$ = 75; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" END + { $$ = 76; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" END + { $$ = 77; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" END + { $$ = 78; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" END + { $$ = 79; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" END + { $$ = 80; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" END + { $$ = 81; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END + { $$ = 82; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END + { $$ = 83; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END + { $$ = 84; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END + { $$ = 85; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + END + { $$ = 86; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" END + { $$ = 87; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" END + { $$ = 88; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" END + { $$ = 89; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" END + { $$ = 90; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" END + { $$ = 91; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" END + { $$ = 92; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" END + { $$ = 93; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" END + { $$ = 94; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" END + { $$ = 95; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END + { $$ = 96; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END + { $$ = 97; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END + { $$ = 98; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END + { $$ = 99; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + END + { $$ = 100; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" END + { $$ = 101; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" END + { $$ = 102; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" END + { $$ = 103; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" END + { $$ = 104; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" END + { $$ = 105; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" END + { $$ = 106; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" END + { $$ = 107; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" END + { $$ = 108; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" END + { $$ = 109; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END + { $$ = 110; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END + { $$ = 111; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + END + { $$ = 112; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" END + { $$ = 113; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" END + { $$ = 114; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" END + { $$ = 115; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" END + { $$ = 116; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" END + { $$ = 117; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" END + { $$ = 118; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" END + { $$ = 119; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" END + { $$ = 120; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" END + { $$ = 121; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END + { $$ = 122; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END + { $$ = 123; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + END + { $$ = 124; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" END + { $$ = 125; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" END + { $$ = 126; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" END + { $$ = 127; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" END + { $$ = 128; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" END + { $$ = 129; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" END + { $$ = 130; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" END + { $$ = 131; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" END + { $$ = 132; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" END + { $$ = 133; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END + { $$ = 134; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END + { $$ = 135; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + END + { $$ = 136; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" END + { $$ = 137; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" END + { $$ = 138; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" END + { $$ = 139; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" END + { $$ = 140; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" END + { $$ = 141; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" END + { $$ = 142; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" END + { $$ = 143; } +| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" END + { $$ = 144; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" END + { $$ = 145; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END + { $$ = 146; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END + { $$ = 147; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + END + { $$ = 148; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" END + { $$ = 149; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" END + { $$ = 150; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" END + { $$ = 151; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" END + { $$ = 152; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" END + { $$ = 153; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" END + { $$ = 154; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" END + { $$ = 155; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" END + { $$ = 156; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" END + { $$ = 157; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END + { $$ = 158; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END + { $$ = 159; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + END + { $$ = 160; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" END + { $$ = 161; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" END + { $$ = 162; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" END + { $$ = 163; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" END + { $$ = 164; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" END + { $$ = 165; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" END + { $$ = 166; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" END + { $$ = 167; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
+ "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" END + { $$ = 168; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" END + { $$ = 169; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END + { $$ = 170; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END + { $$ = 171; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + END + { $$ = 172; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" END + { $$ = 173; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" END + { $$ = 174; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" END + { $$ = 175; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" END + { $$ = 176; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" END + { $$ = 177; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" END + { $$ = 178; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" END + { $$ = 179; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" END + { $$ = 180; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" END + { $$ = 181; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END + { $$ = 182; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END + { $$ = 183; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + END + { $$ = 184; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" END + { $$ = 185; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" END + { $$ = 186; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" END + { $$ = 187; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" END + { $$ = 188; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" END + { $$ = 189; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" END + { $$ = 190; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" END + { $$ = 191; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" END + { $$ = 192; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" END + { $$ = 193; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END + { $$ = 194; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END + { $$ = 195; } +| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + END + { $$ = 196; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" END + { $$ = 197; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" END + { $$ = 198; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" END + { $$ = 199; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" END + { $$ = 200; } +; +%% + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int inner = 1; + static int outer = 0; + if (outer > MAX) + return 0; + else if (inner > outer) + { + inner = 1; + ++outer; + return END; + } + return inner++; +} +#include /* getenv. */ +#include /* strcmp. */ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y input: | (1 + # + 1) = 1111 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1494: $PREPARSER ./calc input -./calc.at:1489: cat stderr stderr: Starting parse Entering state 0 @@ -233869,16 +233804,9 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: $PREPARSER ./input -input: - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input stderr: -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -233962,79 +233890,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -619. 
existing.at:808: ok -stderr: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -234045,93 +233900,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () ./calc.at:1494: cat stderr input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | (1 + 1) / (1 - 1) ./calc.at:1494: $PREPARSER ./calc input - -./calc.at:1489: cat stderr stderr: -./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -234247,9 +234020,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -234367,91 +234137,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... 
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -234462,702 +234149,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () ./calc.at:1494: cat stderr 569. calc.at:1494: ok -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./regression.at:437: $PREPARSER ./input -./calc.at:1489: cat stderr -stderr: - -syntax error, unexpected a, expecting ∃¬∩∪∀ -./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -632. regression.at:437: ok -stderr: -stdout: - -./existing.at:808: $PREPARSER ./input -stderr: -stderr: -stdout: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:438: $PREPARSER ./input -stderr: -syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -620. existing.at:808: ok -./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -645. regression.at:1291: testing String alias declared after use ... -./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1489: cat stderr -stderr: -564. calc.at:1489: ok -stdout: -./regression.at:1143: $PREPARSER ./dancer - -633. regression.at:438: ok -stderr: -syntax error, unexpected ':' -./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -638. regression.at:1143: ok - - - -stderr: -stdout: -./regression.at:1220: $PREPARSER ./expect2 -646. regression.at:1314: testing Extra lookahead sets in report ... -./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -stderr: -645. regression.at:1291: ok -syntax error, unexpected '+', expecting A or B -./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -641. regression.at:1220: ok - -647. regression.at:1355: testing Token number in precedence declaration ... -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y - -./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output -648. regression.at:1408: testing parse-gram.y: LALR = IELR ... -./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y -646. regression.at:1314: ok -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -650. regression.at:1504: testing parse.error=verbose overflow ... -649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... -./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -651. regression.at:1628: testing LAC: Exploratory stack ... -./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -652. regression.at:1739: testing LAC: Memory exhaustion ... -./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y -./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -653. regression.at:1874: testing Lex and parse params: yacc.c ... 
-./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] -./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -654. regression.at:1875: testing Lex and parse params: glr.c ... -./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./regression.at:1263: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 20): --> $$ = nterm start () -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'a' (PRINTER) -syntax error, unexpected 'a', expecting end of file -Error: popping nterm start () -Stack now 0 -Cleanup: discarding lookahead token 'a' (PRINTER) -DESTRUCTOR -Stack now 0 -./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -644. regression.at:1230: ok - -stderr: -stdout: -./regression.at:1874: $PREPARSER ./input -655. regression.at:1876: testing Lex and parse params: lalr1.cc ... -./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -653. 
regression.at:1874: ok -stderr: -stderr: -stdout: -stdout: -./regression.at:1613: $PREPARSER ./input -./regression.at:1483: $PREPARSER ./input -stderr: -stderr: - -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C -syntax error, unexpected 'd' -syntax error -memory exhausted -./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -650. regression.at:1504: ok -649. regression.at:1430: ok -./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - -./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y - -stderr: -stdout: -./regression.at:1144: $PREPARSER ./dancer -stderr: -syntax error, unexpected ':' -./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -639. regression.at:1144: ok -stderr: - -stdout: -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1221: $PREPARSER ./expect2 -stderr: -syntax error, unexpected '+', expecting A or B -./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -642. regression.at:1221: ok - -656. regression.at:1877: testing Lex and parse params: glr.cc ... -./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -658. regression.at:1889: testing stdio.h is not needed ... -./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -657. regression.at:1878: testing Lex and parse params: glr2.cc ... -./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -659. push.at:25: testing Memory Leak for Early Deletion ... -./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./regression.at:1394: $PREPARSER ./input -stderr: -./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -647. regression.at:1355: ok -660. 
push.at:84: testing Multiple impure instances ... -./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -661. push.at:145: testing Unsupported Skeletons ... -./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -661. push.at:145: ok - -stderr: -stdout: -658. regression.at:1889: ok -stderr: -stdout: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt - -stderr: -./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1713: grep 'syntax error,' stderr.txt -./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1420: diff lalr.c ielr.c -648. regression.at:1408: ok -./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stderr: -stdout: -./push.at:76: $PREPARSER ./input -stderr: -./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -659. push.at:25: ok -stderr: -stdout: -./existing.at:1460: $PREPARSER ./input -stderr: -syntax error, unexpected LEFT -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -622. existing.at:1460: ok -662. push.at:167: testing Pstate reuse ... -./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: - -./push.at:134: $PREPARSER ./input -stderr: -./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -663. c++.at:26: testing C++ Locations Unit Tests ... -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... -./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy -665. 
c++.at:247: testing Multiple occurrences of $n and api.value.automove ... -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -666. c++.at:566: testing Variants lalr1.cc ... -stderr: -stdout: -./existing.at:1460: $PREPARSER ./input -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -623. existing.at:1460: ok - -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stderr: -input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 16 | | "twice" exp { $$ = $2 + $2; } - | ^~ -input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~~~ -input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~ -stdout: -./regression.at:1875: $PREPARSER ./input -stderr: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:263: sed 's,.*/$,,' stderr 1>&2 -./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -654. regression.at:1875: ok -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error - -667. c++.at:567: testing Variants lalr1.cc parse.assert ... -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -stdout: -./push.at:135: $PREPARSER ./input -stderr: -./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -660. push.at:84: ok - -668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... 
-======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -665. c++.at:247: ok -./regression.at:1145: $PREPARSER ./dancer -stderr: -syntax error, unexpected ':' -./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -640. regression.at:1145: stderr: - ok -stdout: -./regression.at:1222: $PREPARSER ./expect2 -stderr: -syntax error, unexpected '+', expecting A or B -./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -643. regression.at:1222: ok +617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: -stdout: -./regression.at:1772: $PREPARSER ./input --debug -stderr: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -LAC: initial context established for "end of file" -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -memory exhausted -Cleanup: discarding lookahead token "end of file" () -Stack now 0 -./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y -670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... 
-./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error stderr: stdout: -./calc.at:1492: "$PERL" -ne ' +./calc.at:1491: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -235168,12 +234203,6 @@ || /\t/ )' calc.cc calc.hh -stderr: -stdout: -672. 
c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... -./push.at:277: ./input -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -235188,10 +234217,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1492: $PREPARSER ./calc input -662. push.at:167: ok -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -236029,7 +235055,7 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -236869,7 +235895,39 @@ Cleanup: popping nterm input (1.1-14.0: ) input: | 1 2 -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] 
+input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: Starting parse Entering state 0 @@ -236886,7 +235944,8 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 stderr: Starting parse Entering state 0 @@ -236903,7 +235962,8 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) -./calc.at:1492: "$PERL" -pi -e 'use strict; +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -236913,10 +235973,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr +./calc.at:1491: cat stderr input: | 1//2 -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -236938,7 +235998,7 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -236960,8 +236020,7 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -236971,15 +236030,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... 
-./calc.at:1492: cat stderr -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./calc.at:1491: cat stderr input: | error -./calc.at:1492: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -236987,7 +236041,7 @@ Next token is token invalid token (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -236995,7 +236049,7 @@ Next token is token invalid token (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -237005,10 +236059,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr +./calc.at:1491: cat stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none input: | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -237039,7 +236094,7 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -237070,7 +236125,7 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -237080,45 +236135,2322 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr +./calc.at:1491: cat stderr input: | | +1 -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering 
state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +./calc.at:1491: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by 
rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +input: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none + | (1 + #) = 1111 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +input: + | (# + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./calc.at:1491: cat stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +input: + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -stdout: -./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -237126,47 +238458,12 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
-./regression.at:1714: grep 'syntax error,' stderr.txt -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./calc.at:1492: cat stderr -./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./calc.at:1492: $PREPARSER ./calc /dev/null -./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -Starting parse -Entering state 0 -Reading a token Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./calc.at:1492: "$PERL" -pi -e 'use strict; +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -237176,9 +238473,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1491: cat stderr stderr: stdout: -./calc.at:1491: "$PERL" -ne ' +./calc.at:1492: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -237189,12 +238487,10 @@ || /\t/ )' calc.cc calc.hh -./calc.at:1492: cat stderr -./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: + | (1 + 1) / (1 - 1) +./calc.at:1491: $PREPARSER ./calc input input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1492: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -237208,7 +238504,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -237217,234 +238513,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) + $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) 
- $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -237457,7 +238621,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -237466,234 +238630,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) + $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) 
- $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -238543,8 +239575,8 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -238554,9 +239586,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./calc.at:1492: cat stderr +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -239395,11 +240426,9 @@ Cleanup: popping nterm input (1.1-14.0: ) input: | 1 2 -./calc.at:1491: $PREPARSER ./calc input -input: - | 
(!!) + (1 2) = 1 ./calc.at:1492: $PREPARSER ./calc input stderr: +566. calc.at:1491: ok Starting parse Entering state 0 Reading a token @@ -239415,7 +240444,805 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) + +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +input: + | 1//2 +./calc.at:1492: $PREPARSER ./calc input +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input: + | error +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... +input: + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +input: + | + | +1 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./calc.at:1492: cat stderr +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -239528,22 +241355,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -239657,16 +241468,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -239677,57 +241478,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: cat stderr ./calc.at:1492: cat stderr input: - | 1//2 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: | (- *) + (1 2) = 1 ./calc.at:1492: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) stderr: Starting parse Entering state 0 @@ -239845,18 +241599,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr stderr: Starting parse Entering state 0 @@ -239974,10 +241717,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1491: $PREPARSER ./calc input -stderr: ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -239988,36 +241727,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) input: | (* *) + (*) + (*) ./calc.at:1492: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -240134,10 +241848,7 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 stderr: -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -240253,37 +241964,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240294,53 +241974,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -stderr: ./calc.at:1492: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading 
a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) input: | 1 + 2 * 3 + !+ ++ ./calc.at:1492: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -240407,9 +242045,6 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 stderr: Starting parse Entering state 0 @@ -240476,7 +242111,6 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: $PREPARSER ./calc input input: | 1 + 2 * 3 + !- ++ ./calc.at:1492: $PREPARSER ./calc input @@ -240484,28 +242118,6 @@ Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -240568,38 +242180,8 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -240665,7 +242247,6 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: cat stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240676,50 +242257,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) ./calc.at:1492: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' input: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) | (#) + (#) = 2222 ./calc.at:1492: $PREPARSER ./calc input stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1494: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -240818,7 +242366,6 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr stderr: Starting parse Entering state 0 @@ -240917,24 +242464,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1491: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240946,256 +242475,7 @@ }eg ' expout || exit 77 ./calc.at:1492: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + #) = 1111 ./calc.at:1492: $PREPARSER ./calc input stderr: @@ -241206,234 +242486,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -241446,6 +242556,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -241524,10 +242635,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -241537,6 +242645,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -241544,26 +242657,18 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' 
(1.4: ) -Entering state 20 +Error: discarding token '+' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -241614,81 +242719,65 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) 
+ $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -241696,766 +242785,485 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '+' (1.8: ) +Error: 
discarding token '+' (1.8: ) Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = 
token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Reading a token +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token 
is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 -Next token is token '\n' (7.10-8.0: ) +Reading a token +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = 
nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) 
+Entering state 22 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' (1.17: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering 
state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 
256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering 
state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stdout: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1876: $PREPARSER ./input -./calc.at:1491: cat stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr +568. calc.at:1492: ok + +619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -243293,469 +244101,7 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -stderr: -./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: - | 1 2 - | (!!) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: $PREPARSER ./calc input -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1492: cat stderr -655. 
regression.at:1876: ok -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | 1//2 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -243768,263 +244114,69 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: -stderr: - | (1 + # + 1) = 1111 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp 
(1.1-9: 7) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -244032,741 +244184,763 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) 
+ $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 Reading a token -Next token 
is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | error -./calc.at:1494: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Starting parse -Entering state 0 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token 
number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (10.11: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering 
state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) 
+ $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) input: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 + | 1 2 ./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse @@ -244780,62 +244954,74 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stdout: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./existing.at:74: $PREPARSER ./input stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +stderr: +stderr: +stdout: +./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1494: cat stderr +./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: $PREPARSER ./input +stderr: +617. existing.at:74: ok +./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error, unexpected '*', expecting NEWLINE or '{' or ';' +./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1//2 +./calc.at:1494: $PREPARSER ./calc input +stderr: +./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 Starting parse Entering state 0 Reading a token @@ -244847,28 +245033,33 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: cat stderr +Cleanup: discarding lookahead token '/' (1.3: ) stderr: +./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... +stderr: +stderr: + +memory exhausted +memory exhausted +./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +616. 
existing.at:74: ok +stderr: +stderr: +memory exhausted +memory exhausted +stderr: Starting parse Entering state 0 Reading a token @@ -244880,66 +245071,78 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +memory exhausted +memory exhausted +./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr +input: + | error +./calc.at:1494: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./torture.at:238: $PREPARSER ./input +stderr: +./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +Starting parse +Entering state 0 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +stderr: +./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... +./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +605. torture.at:216: ok +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./calc.at:1494: cat stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y input: -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + | 1 = 2 = 3 +./calc.at:1494: $PREPARSER ./calc input + +621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -244969,7 +245172,8 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -568. 
calc.at:1492: ok +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -244982,9 +245186,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -244992,51 +245196,14 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Entering state 27 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245047,93 +245214,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1494: cat stderr -./calc.at:1491: cat stderr input: | | +1 ./calc.at:1494: $PREPARSER ./calc input -input: - | (#) + (#) = 2222 -./calc.at:1491: $PREPARSER ./calc input stderr: +622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... 
Starting parse Entering state 0 Reading a token @@ -245153,106 +245240,9 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -245272,7 +245262,6 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245284,127 +245273,21 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./torture.at:141: $PREPARSER ./input ./calc.at:1494: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: $PREPARSER ./calc /dev/null -./calc.at:1491: cat stderr stderr: +604. torture.at:132: ok Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) -input: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -245412,85 +245295,7 @@ Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245502,172 +245307,18 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr +stdout: +./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 ./calc.at:1494: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -245917,79 +245568,16 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -675. c++.at:584: testing Variants and Typed Midrule Actions ... 
-Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +stderr: +memory exhausted +memory exhausted +./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +memory exhausted +memory exhausted +614. torture.at:485: ok ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: Starting parse Entering state 0 @@ -246238,17 +245826,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr +stderr: +stdout: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -246259,184 +245838,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1494: cat stderr -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - | (1 + # + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input +./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 + stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) input: +./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (!!) 
+ (1 2) = 1 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: +./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 Starting parse Entering state 0 Reading a token @@ -246548,19 +245964,12 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -246672,8 +246081,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + 1) / (1 - 1) +stderr: +memory exhausted +memory exhausted +615. torture.at:531: ok ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -246684,127 +246095,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: $PREPARSER ./calc input ./calc.at:1494: cat stderr + stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token 
'\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (- *) + (1 2) = 1 +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse @@ -246814,122 +246142,6 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 @@ -247040,17 +246252,9 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: Starting parse Entering state 0 @@ -247168,7 +246372,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: cat stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247179,12 +246382,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -566. calc.at:1491: ok ./calc.at:1494: cat stderr +624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y input: | (* *) + (*) + (*) - ./calc.at:1494: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: Starting parse Entering state 0 @@ -247418,6 +246622,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +625. regression.at:25: testing Trivial grammars ... +./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247635,6 +246841,7 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: Starting parse Entering state 0 @@ -247712,8 +246919,6 @@ }eg ' expout || exit 77 ./calc.at:1494: cat stderr -676. c++.at:794: testing Doxygen Public Documentation ... 
-./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy input: | (#) + (#) = 2222 ./calc.at:1494: $PREPARSER ./calc input @@ -247925,16 +247130,10 @@ }eg ' expout || exit 77 ./calc.at:1494: cat stderr -./c++.at:794: doxygen --version || exit 77 ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2026-01-31 08:18:17.488666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror input: -stdout: -676. c++.at:794: | (1 + #) = 1111 + | (1 + #) = 1111 ./calc.at:1494: $PREPARSER ./calc input - skipped (c++.at:794) stderr: Starting parse Entering state 0 @@ -248013,12 +247212,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: - Starting parse Entering state 0 Reading a token @@ -248096,8 +247291,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -248108,13 +247301,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1715: grep 'syntax error,' stderr.txt ./calc.at:1494: cat stderr -./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt input: | (# + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input -./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt stderr: Starting parse Entering state 0 @@ -248185,7 +247375,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -248257,9 +247446,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -248270,17 +247456,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: ./calc.at:1494: cat stderr -./regression.at:1877: $PREPARSER ./input -stderr: input: | (1 + # + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input -./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -656. 
regression.at:1877: ok Starting parse Entering state 0 Reading a token @@ -248365,8 +247545,6 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -677. c++.at:795: testing Doxygen Private Documentation ... -./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: Starting parse Entering state 0 @@ -248451,7 +247629,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -248582,7 +247759,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -248712,66 +247888,1428 @@ ' expout || exit 77 ./calc.at:1494: cat stderr 570. calc.at:1494: ok -./c++.at:795: doxygen --version || exit 77 ---- /dev/null 2026-01-30 14:06:30.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2026-01-31 08:18:17.632666585 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stdout: -677. c++.at:795: - skipped (c++.at:795) -678. c++.at:848: testing Relative namespace references ... -./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c +626. regression.at:55: testing YYSTYPE typedef ... 
+./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: 
error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for 
PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stdout: +626. regression.at:55: ok + +stderr: +stdout: +625. regression.at:25: ok +627. regression.at:85: testing Early token definitions with --yacc ... +./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none + +./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +628. regression.at:127: testing Early token definitions without --yacc ... 
+./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: 
useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, 
use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +627. regression.at:85: ok + +629. regression.at:173: testing Braces parsing ... +stderr: +./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +stdout: +628. regression.at:127: ok + +./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c +stdout: + { tests = {{{{{{{{{{}}}}}}}}}}; } +629. regression.at:173: ok + +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +630. regression.at:196: testing Rule Line Numbers ... +./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y +631. regression.at:345: testing Mixing %token styles ... +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y +./regression.at:235: cat input.output +630. 
regression.at:196: ok + +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: +input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] +input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] +./regression.at:357: sed 's,.*/$,,' stderr 1>&2 +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +632. regression.at:437: testing Token definitions: parse.error=detailed ... +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none +stderr: +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +631. 
regression.at:345: ok +./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error + +stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:808: grep '^State.*conflicts:' input.output +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +633. regression.at:438: testing Token definitions: parse.error=verbose ... +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
+ | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./regression.at:438: sed 's,.*/$,,' stderr 1>&2 +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stdout: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./existing.at:808: $PREPARSER ./input +stderr: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +619. existing.at:808: ok + +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +634. regression.at:447: testing Characters Escapes ... +./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +stdout: +./regression.at:437: $PREPARSER ./input +stderr: +syntax error, unexpected a, expecting ∃¬∩∪∀ +./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +632. regression.at:437: ok + +stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +634. regression.at:447: ok + +635. regression.at:480: testing Web2c Report ... +./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y +./regression.at:506: cat input.output +635. regression.at:480: ok + +636. regression.at:661: testing Web2c Actions ... 
+./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./existing.at:808: grep '^State.*conflicts:' input.output +stderr: +stdout: +./regression.at:438: $PREPARSER ./input +./regression.at:679: cat tables.c +stderr: +syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" +./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +636. regression.at:661: ok +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +633. regression.at:438: ok + + +637. regression.at:812: testing Useless Tokens ... +./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y +638. regression.at:1143: testing Dancer ... +./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +639. regression.at:1144: testing Dancer %glr-parser ... +./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +stderr: +stdout: +./existing.at:1460: $PREPARSER ./input +stderr: +syntax error, unexpected LEFT +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +622. existing.at:1460: ok +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none + +./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +stderr: +stdout: +./existing.at:808: $PREPARSER ./input +stderr: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +620. existing.at:808: ok +./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS + +640. regression.at:1145: testing Dancer lalr1.cc ... +./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y +./regression.at:917: cat tables.c +637. regression.at:812: ok + +641. regression.at:1220: testing Expecting two tokens ... 
+./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS +stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +642. regression.at:1221: testing Expecting two tokens %glr-parser ... +./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +stderr: +stdout: +./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +./regression.at:1143: $PREPARSER ./dancer +stderr: +syntax error, unexpected ':' +./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +638. regression.at:1143: ok + +643. regression.at:1222: testing Expecting two tokens lalr1.cc ... +./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y +./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS +stderr: +stdout: +./regression.at:1220: $PREPARSER ./expect2 +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +641. regression.at:1220: ok + +stderr: +stdout: +./existing.at:1460: $PREPARSER ./input +stderr: +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +623. existing.at:1460: ok + +644. regression.at:1230: testing Braced code in declaration in rules section ... +./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +645. regression.at:1291: testing String alias declared after use ... +./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +645. regression.at:1291: ok + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +646. regression.at:1314: testing Extra lookahead sets in report ... +./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y +./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output +stderr: +stdout: +./regression.at:1144: $PREPARSER ./dancer +646. regression.at:1314: ok +stderr: +syntax error, unexpected ':' +./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +639. 
regression.at:1144: ok +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 + + +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1221: $PREPARSER ./expect2 +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +642. regression.at:1221: ok + +647. regression.at:1355: testing Token number in precedence declaration ... +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y +648. regression.at:1408: testing parse-gram.y: LALR = IELR ... +./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y +649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... +./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror +stderr: +stdout: +./regression.at:1263: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 20): +-> $$ = nterm start () +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'a' (PRINTER) +syntax error, unexpected 'a', expecting end of file +Error: popping nterm start () +Stack now 0 +Cleanup: discarding lookahead token 'a' (PRINTER) +DESTRUCTOR +Stack now 0 +./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +644. regression.at:1230: input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] + ok +./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 + +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +650. regression.at:1504: testing parse.error=verbose overflow ... 
+./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y +stderr: +stdout: +./regression.at:1483: $PREPARSER ./input +stderr: +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +649. regression.at:1430: ok + +651. regression.at:1628: testing LAC: Exploratory stack ... +./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +stderr: +stdout: +./regression.at:1613: $PREPARSER ./input +./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C +syntax error, unexpected 'd' +syntax error +memory exhausted +./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +650. regression.at:1504: ok +stderr: +stdout: +./regression.at:1394: $PREPARSER ./input +stderr: +./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +647. regression.at:1355: ok + +652. regression.at:1739: testing LAC: Memory exhaustion ... +./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +653. regression.at:1874: testing Lex and parse params: yacc.c ... +stderr: +./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./regression.at:1145: $PREPARSER ./dancer +stderr: +syntax error, unexpected ':' +./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +640. regression.at:1145: ok +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./existing.at:74: $PREPARSER ./input +stderr: +./regression.at:1420: diff lalr.c ielr.c +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +618. existing.at:74: ok +648. regression.at:1408: ok + + +654. regression.at:1875: testing Lex and parse params: glr.c ... 
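(Another hedged aside, not log output.) The regression.at:1628 "LAC: Exploratory stack" and regression.at:1739 "LAC: Memory exhaustion" groups above enable lookahead correction from the command line via -Dparse.lac=full, with -Dparse.lac.es-capacity-initial=1 forcing a deliberately tiny exploratory stack so the reallocation and memory-exhaustion paths are exercised. Under the assumption that one would rather configure this inside the grammar than on the command line, a minimal sketch:

    /* Grammar-file counterpart of the -D options used in the log above.     */
    %define parse.lac full                   /* enable lookahead correction  */
    %define parse.lac.es-capacity-initial 1  /* start exploratory stack tiny */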
+./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./regression.at:1222: $PREPARSER ./expect2 +655. regression.at:1876: testing Lex and parse params: lalr1.cc ... +./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +656. regression.at:1877: testing Lex and parse params: glr.cc ... +./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +643. regression.at:1222: ok + +./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./regression.at:1874: $PREPARSER ./input +stderr: +./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: testing Lex and parse params: glr2.cc ... +./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +653. regression.at:1874: ok + +stderr: +stdout: +./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./regression.at:1713: grep 'syntax error,' stderr.txt +./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +658. regression.at:1889: testing stdio.h is not needed ... +./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +658. regression.at:1889: ok + +659. push.at:25: testing Memory Leak for Early Deletion ... 
+./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1714: grep 'syntax error,' stderr.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +stderr: +stdout: +./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./push.at:76: $PREPARSER ./input +stderr: +./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +659. push.at:25: ok +stderr: +stdout: +./regression.at:1875: $PREPARSER ./input + +stderr: +./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +654. regression.at:1875: ok +./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +stderr: +stdout: +./regression.at:1772: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +LAC: initial context established for "end of file" +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +memory exhausted +Cleanup: discarding lookahead token "end of file" () +Stack now 0 +./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +660. push.at:84: testing Multiple impure instances ... +./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +661. push.at:145: testing Unsupported Skeletons ... +./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +661. push.at:145: ok + +662. push.at:167: testing Pstate reuse ... +./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./push.at:134: $PREPARSER ./input +stderr: +./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1876: $PREPARSER ./input +stderr: +./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +655. 
regression.at:1876: ok + +stderr: +stdout: +./push.at:135: $PREPARSER ./input +stderr: +stderr: +./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +660. push.at:84: ./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror + ok +stderr: +stderr: +./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:1715: grep 'syntax error,' stderr.txt +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt + +./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +stderr: +stdout: +./regression.at:1877: $PREPARSER ./input +stderr: +./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +663. c++.at:26: testing C++ Locations Unit Tests ... +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +656. regression.at:1877: ok +stderr: + +stdout: +./push.at:277: ./input +662. push.at:167: ok + +./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... +./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy +665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy +666. c++.at:566: testing Variants lalr1.cc ... 
+======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 16 | | "twice" exp { $$ = $2 + $2; } + | ^~ +input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~~~ +input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~ +./c++.at:263: sed 's,.*/$,,' stderr 1>&2 +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error +stderr: +stdout: +./torture.at:395: $PREPARSER ./input +stderr: +./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +613. torture.at:385: ok + +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none +667. c++.at:567: testing Variants lalr1.cc parse.assert ... +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +665. c++.at:247: ok +stderr: +stdout: + +./regression.at:1788: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" () +LAC: initial context established for "invalid token" +LAC: checking lookahead "invalid token": Always Err +Constructing syntax error message +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +syntax error +memory exhausted +Cleanup: discarding lookahead token "invalid token" () +Stack now 0 +./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +652. regression.at:1739: ok + +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... 
+======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence 
[-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] 
+input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stdout: +./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1716: grep 'syntax error,' stderr.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./regression.at:1878: $PREPARSER ./input +stderr: +./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: ok + +stderr: +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... 
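(Again an editorial aside, not log output.) The "empty rule without %empty [-Werror=empty-rule]" lines in the block above flag grammar alternatives whose right-hand side is implicitly empty. A tiny hypothetical fragment, not the test's input.y, showing the spelling bison asks for:

    %%
    /* Implicitly empty alternative, warned under -Wall (-Wempty-rule):    */
    /*   opt_semi: | ';' ;                                                 */
    /* Explicitly empty alternative, accepted silently:                    */
    opt_semi: %empty
            | ';'
            ;

The "fix-its can be applied. Rerun with option '--update'." line that closes each of these error blocks is bison offering to edit the grammar file accordingly.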
+======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: grep 'syntax error,' stderr.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y +651. regression.at:1628: skipped (regression.at:1727) +stderr: + +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:566: $here/modern +stdout: +Modern C++: 201703 +./c++.at:566: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:568: $here/modern +stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +./c++.at:567: $here/modern +stdout: +Modern C++: 201703 +./c++.at:567: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern +stdout: +Modern C++: 201703 +./c++.at:569: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:570: $here/modern +stdout: +Modern C++: 201703 +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +======== Testing with C++ standard flags: '' 
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stdout: +./c++.at:566: $here/modern +stdout: +Legac++ +./c++.at:566: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +stderr: +stdout: +./c++.at:571: $here/modern +stdout: +Modern C++: 201703 +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:567: $here/modern +stdout: +Legac++ +./c++.at:567: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:568: $here/modern +stdout: +Legac++ +./c++.at:568: 
$PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./regression.at:1788: $PREPARSER ./input --debug +./c++.at:235: $PREPARSER ./list stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" () -LAC: initial context established for "invalid token" -LAC: checking lookahead "invalid token": Always Err -Constructing syntax error message -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -syntax error -memory exhausted -Cleanup: discarding lookahead token "invalid token" () -Stack now 0 -./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -679. c++.at:854: testing Absolute namespace references ... -./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -652. regression.at:1739: ok -680. c++.at:863: testing Syntactically invalid namespace references ... -./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy - +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -681. c++.at:884: testing Syntax error discarding no lookahead ... 
+./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern +stdout: +Legac++ +./c++.at:569: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./existing.at:74: $PREPARSER ./input +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -618. existing.at:74: ok - -./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... -./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -680. c++.at:863: ======== Testing with C++ standard flags: '' - ok -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS - +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:92: $PREPARSER ./input @@ -248779,218 +249317,240 @@ ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -683. c++.at:1065: testing Syntax error as exception: glr.cc ... 
-./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stdout: +./c++.at:570: $here/modern ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +Legac++ +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +./c++.at:566: $here/modern +stdout: +Legac++ +./c++.at:566: $PREPARSER ./list stderr: -./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1716: grep 'syntax error,' stderr.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y -./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: +stderr: ./c++.at:235: $PREPARSER ./list +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: ./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./regression.at:1878: $PREPARSER ./input +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: 
-./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -657. regression.at:1878: ok - -684. c++.at:1066: testing Syntax error as exception: glr2.cc ... +stdout: +./c++.at:567: $here/modern stderr: stdout: -./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:849: $PREPARSER ./input +./c++.at:571: $here/modern +stdout: +stdout: +Legac++ +Legac++ +./c++.at:567: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: +./c++.at:568: $here/modern stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +Legac++ +./c++.at:568: $PREPARSER ./list stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:92: $PREPARSER ./input stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: 
-input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence 
[-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] 
-input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stdout: -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 ./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error stdout: -./c++.at:659: $PREPARSER ./input +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stdout: +./c++.at:569: $here/modern +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o modern modern.cc $LIBS +stdout: +Legac++ +./c++.at:569: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:566: $here/modern stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" @@ -249019,26 +249579,50 @@ ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $here/modern +stdout: +Legac++ +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:92: $PREPARSER ./input @@ -249049,9 +249633,15 @@ ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./existing.at:1460: $PREPARSER ./input +stderr: +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +624. 
existing.at:1460: stderr: + ok +stdout: ./c++.at:567: $here/modern stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" @@ -249076,13 +249666,54 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ======== Testing with C++ standard flags: '' ./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:571: $here/modern +stdout: +Legac++ +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: ./c++.at:568: $here/modern stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:568: $PREPARSER ./list stderr: Destroy: "" @@ -249109,16 +249740,28 @@ ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:569: $here/modern stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" @@ -249147,10 +249790,10 @@ ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $here/modern +./c++.at:566: $here/modern stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list +Modern C++: 201402 
+./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249173,14 +249816,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:570: $here/modern stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" @@ -249207,35 +249852,19 @@ ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -======== Testing with C++ standard flags: '' +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: ./c++.at:571: $here/modern -./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 201703 +Modern C++: 201103 ./c++.at:571: $PREPARSER ./list -./regression.at:1719: grep 'syntax error,' stderr.txt stderr: Destroy: "0" Destroy: "0" @@ -249259,63 +249888,27 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt ======== Testing with C++ standard flags: '' ./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -651. 
regression.at:1628: skipped (regression.at:1727) - +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -685. c++.at:1360: testing Exception safety with error recovery ... -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +./c++.at:92: $PREPARSER ./input stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:567: $here/modern stdout: -Modern C++: 201703 -./c++.at:573: $PREPARSER ./list +Modern C++: 201402 +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249338,27 +249931,22 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +663. c++.at:26: ok +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... ======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern +./c++.at:568: $here/modern stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list +Modern C++: 201402 +./c++.at:568: $PREPARSER ./list stderr: Destroy: "" Destroy: "" @@ -249381,124 +249969,70 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:1360: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: +./c++.at:572: $here/modern stdout: -./torture.at:395: $PREPARSER ./input +Modern C++: 201703 +./c++.at:572: $PREPARSER ./list stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -613. torture.at:385: ok - +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input +./c++.at:235: $PREPARSER ./list +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -686. 
c++.at:1361: testing Exception safety without error recovery ... -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:659: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: +./c++.at:569: $here/modern stdout: -./c++.at:1361: ./exceptions || exit 77 +Modern C++: 201402 +./c++.at:569: $PREPARSER ./list stderr: -Inner caught -Outer caught -./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy stderr: stdout: ./c++.at:566: $here/modern stdout: -Legac++ +Modern C++: 201703 ./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" @@ -249522,88 +250056,216 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y 
======== Testing with C++ standard flags: '' ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:941: $PREPARSER ./input +./c++.at:570: $here/modern +stdout: +Modern C++: 201402 +./c++.at:570: $PREPARSER ./list stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./c++.at:1064: $PREPARSER ./input < in +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in +./c++.at:571: $here/modern +stdout: +Modern C++: 201402 +./c++.at:571: $PREPARSER ./list stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) 
+Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:567: $here/modern +stdout: +Modern C++: 201703 +./c++.at:567: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:849: $PREPARSER ./input +./c++.at:568: $here/modern +stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:573: $here/modern +stdout: +Modern C++: 201703 +./c++.at:573: $PREPARSER ./list stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -stdout: -./c++.at:568: $here/modern 
-./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: $here/modern stdout: Legac++ -./c++.at:568: $PREPARSER ./list +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249626,40 +250288,40 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./existing.at:808: $PREPARSER ./input +stderr: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +621. existing.at:808: ok + +stderr: stdout: -Legac++ -./c++.at:567: $PREPARSER ./list +./c++.at:566: $here/modern stdout: -Legac++ +Modern C++: 202002 +./c++.at:566: $PREPARSER ./list stderr: -./c++.at:570: $PREPARSER ./list Destroy: "0" Destroy: "0" Destroy: 1 @@ -249681,7 +250343,24 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +664. c++.at:107: ok + +675. c++.at:584: testing Variants and Typed Midrule Actions ... +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... 
+======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:570: $here/modern +stdout: +Modern C++: 201703 +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249705,20 +250384,15 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:569: $here/modern stdout: -Legac++ +Modern C++: 201703 ./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" @@ -249745,12 +250419,20 @@ ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:573: $here/modern stdout: Legac++ -./c++.at:571: $PREPARSER ./list +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249773,18 +250455,51 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:235: $PREPARSER ./list +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:571: $here/modern +stdout: +Modern C++: 201703 +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" 
+Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:572: $here/modern @@ -249818,54 +250533,72 @@ ./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:941: $PREPARSER ./input +./c++.at:568: $here/modern +stdout: +Modern C++: 202002 +./c++.at:568: $PREPARSER ./list stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: +./c++.at:567: $here/modern stdout: +Modern C++: 202002 +./c++.at:567: $PREPARSER ./list stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 
2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1360: $PREPARSER ./input i stdout: -stderr: ./c++.at:659: $PREPARSER ./input -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap stderr: +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Starting parse Entering state 0 Stack now 0 @@ -249910,245 +250643,183 @@ Cleanup: popping token EOI () Cleanup: popping nterm expr (40) destroy: 40 -stderr: ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56ccda00->Object::Object { } -Next token is token 'a' (0x56ccda00 'a') -Shifting token 'a' (0x56ccda00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda00 'a') --> $$ = nterm item (0x56ccda00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56ccda30->Object::Object { 0x56ccda00 } -Next token is token 'a' (0x56ccda30 'a') -Shifting token 'a' (0x56ccda30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda30 'a') --> $$ = nterm item (0x56ccda30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56ccda60->Object::Object { 0x56ccda00, 0x56ccda30 } -Next token is token 'a' (0x56ccda60 'a') -Shifting token 'a' (0x56ccda60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda60 'a') --> $$ = nterm item (0x56ccda60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56ccda90->Object::Object { 0x56ccda00, 0x56ccda30, 0x56ccda60 } -Next token is token 'a' (0x56ccda90 'a') -Shifting token 'a' (0x56ccda90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda90 'a') --> $$ = nterm item (0x56ccda90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56ccdac0->Object::Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90 } -Next token is token 'p' (0x56ccdac0 'p'Exception caught: cleaning lookahead and stack -0x56ccdac0->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90, 0x56ccdac0 } -0x56ccda90->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90 } -0x56ccda60->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60 } -0x56ccda30->Object::~Object { 0x56ccda00, 0x56ccda30 } -0x56ccda00->Object::~Object { 0x56ccda00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56ccda00->Object::Object { } -Next token is token 'a' (0x56ccda00 'a') -Shifting token 'a' (0x56ccda00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda00 'a') --> $$ = 
nterm item (0x56ccda00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56ccda30->Object::Object { 0x56ccda00 } -Next token is token 'a' (0x56ccda30 'a') -Shifting token 'a' (0x56ccda30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda30 'a') --> $$ = nterm item (0x56ccda30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56ccda60->Object::Object { 0x56ccda00, 0x56ccda30 } -Next token is token 'a' (0x56ccda60 'a') -Shifting token 'a' (0x56ccda60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda60 'a') --> $$ = nterm item (0x56ccda60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56ccda90->Object::Object { 0x56ccda00, 0x56ccda30, 0x56ccda60 } -Next token is token 'a' (0x56ccda90 'a') -Shifting token 'a' (0x56ccda90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56ccda90 'a') --> $$ = nterm item (0x56ccda90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56ccdac0->Object::Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90 } -Next token is token 'p' (0x56ccdac0 'p'Exception caught: cleaning lookahead and stack -0x56ccdac0->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90, 0x56ccdac0 } -0x56ccda90->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60, 0x56ccda90 } -0x56ccda60->Object::~Object { 0x56ccda00, 0x56ccda30, 0x56ccda60 } -0x56ccda30->Object::~Object { 0x56ccda00, 0x56ccda30 } -0x56ccda00->Object::~Object { 0x56ccda00 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1066: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:849: $PREPARSER ./input +./c++.at:569: $here/modern +stdout: +Modern C++: 202002 +./c++.at:569: $PREPARSER ./list stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) 
+Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:659: $PREPARSER ./input stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $here/modern +stdout: +Modern C++: 201703 +./c++.at:574: $PREPARSER ./list ======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: stdout: -./c++.at:1066: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' ./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stderr: stdout: +./c++.at:570: $here/modern stdout: -./c++.at:1066: ./check -./c++.at:1064: $PREPARSER ./input < in -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +Modern C++: 202002 +./c++.at:570: $PREPARSER ./list stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:573: $here/modern stdout: Legac++ +stderr: ./c++.at:573: $PREPARSER ./list +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: Destroy: "0" Destroy: "0" @@ -250171,180 +250842,216 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:572: $here/modern +stdout: +Modern C++: 201103 +./c++.at:572: $PREPARSER ./list stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:571: $here/modern +stdout: +Modern C++: 202002 +./c++.at:571: $PREPARSER ./list stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) 
+Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i +stdout: +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $here/modern +stdout: +Modern C++: 202302 +./c++.at:566: $PREPARSER ./list stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57933a00->Object::Object { } -Next token is token 'a' (0x57933a00 'a') -Shifting token 'a' (0x57933a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a00 'a') --> $$ = nterm item (0x57933a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57933a30->Object::Object { 0x57933a00 } -Next token is token 'a' (0x57933a30 'a') -Shifting token 'a' (0x57933a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a30 'a') --> $$ = nterm item (0x57933a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57933a60->Object::Object { 0x57933a00, 0x57933a30 } -Next token is token 'a' (0x57933a60 'a') -Shifting token 'a' (0x57933a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a60 'a') --> $$ = nterm item (0x57933a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x57933a90->Object::Object { 0x57933a00, 0x57933a30, 0x57933a60 } -Next token is token 'a' (0x57933a90 'a') -Shifting token 'a' (0x57933a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a90 'a') --> $$ = nterm item (0x57933a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x57933ac0->Object::Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90 } -Next token is token 'p' (0x57933ac0 'p'Exception caught: cleaning lookahead and stack -0x57933ac0->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90, 0x57933ac0 } -0x57933a90->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90 } -0x57933a60->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60 } -0x57933a30->Object::~Object { 0x57933a00, 0x57933a30 } -0x57933a00->Object::~Object { 0x57933a00 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stdout: +./c++.at:659: $PREPARSER ./input +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a 
token -0x57933a00->Object::Object { } -Next token is token 'a' (0x57933a00 'a') -Shifting token 'a' (0x57933a00 'a') +Next token is token NUMBER (1) +Shifting token NUMBER (1) Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a00 'a') --> $$ = nterm item (0x57933a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57933a30->Object::Object { 0x57933a00 } -Next token is token 'a' (0x57933a30 'a') -Shifting token 'a' (0x57933a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a30 'a') --> $$ = nterm item (0x57933a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57933a60->Object::Object { 0x57933a00, 0x57933a30 } -Next token is token 'a' (0x57933a60 'a') -Shifting token 'a' (0x57933a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a60 'a') --> $$ = nterm item (0x57933a60 'a') -Entering state 10 -Stack now 0 10 10 10 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 Reading a token -0x57933a90->Object::Object { 0x57933a00, 0x57933a30, 0x57933a60 } -Next token is token 'a' (0x57933a90 'a') -Shifting token 'a' (0x57933a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57933a90 'a') --> $$ = nterm item (0x57933a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 Reading a token -0x57933ac0->Object::Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90 } -Next token is token 'p' (0x57933ac0 'p'Exception caught: cleaning lookahead and stack -0x57933ac0->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90, 0x57933ac0 } -0x57933a90->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60, 0x57933a90 } -0x57933a60->Object::~Object { 0x57933a00, 0x57933a30, 0x57933a60 } -0x57933a30->Object::~Object { 0x57933a00, 0x57933a30 } -0x57933a00->Object::~Object { 0x57933a00 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +666. 
c++.at:566: ok +stderr: stdout: -exception caught: printer +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS + stderr: stdout: -./c++.at:574: $here/modern -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:568: $here/modern stdout: +Modern C++: 202302 +./c++.at:568: $PREPARSER ./list stderr: -exception caught: syntax error +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +668. c++.at:568: ok +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS + +676. c++.at:794: testing Doxygen Public Documentation ... +677. c++.at:795: testing Doxygen Private Documentation ... +./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern +stdout: Legac++ ./c++.at:574: $PREPARSER ./list -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -250367,28 +251074,36 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:1361: $PREPARSER ./input aaaaE ./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:795: doxygen --version || exit 77 ======== Testing with C++ standard flags: '' ./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1361: $PREPARSER ./input aaaaT -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR +./c++.at:794: doxygen --version || exit 77 +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2024-12-29 02:14:23.070734958 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found +--- /dev/null 2024-12-28 07:43:36.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2024-12-29 02:14:23.066735122 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +stdout: +stdout: +677. c++.at:795: 676. c++.at:794: skipped (c++.at:795) + skipped (c++.at:794) + + stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: stdout: -./c++.at:566: $here/modern +./c++.at:573: $here/modern stdout: -Legac++ -./c++.at:566: $PREPARSER ./list +Modern C++: 201103 +678. 
c++.at:848: testing Relative namespace references ... +./c++.at:573: $PREPARSER ./list +679. c++.at:854: testing Absolute namespace references ... +./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: Destroy: "0" Destroy: "0" @@ -250411,48 +251126,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' ./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: ./c++.at:659: $PREPARSER ./input stderr: @@ -250506,10 +251191,39 @@ ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:568: $here/modern +./c++.at:572: $here/modern +stderr: stdout: -Legac++ -./c++.at:568: $PREPARSER ./list +stdout: +Modern C++: 201402 +./c++.at:572: $PREPARSER ./list +./c++.at:567: $here/modern +stderr: +stdout: +Modern C++: 202302 +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: $PREPARSER ./list +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -250532,202 +251246,148 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +667. c++.at:567: ok + +680. c++.at:863: testing Syntactically invalid namespace references ... 
+./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stdout: -./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:849: $PREPARSER ./input stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +680. c++.at:863: ok + +681. c++.at:884: testing Syntax error discarding no lookahead ... 
+======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap +stdout: +./c++.at:569: $here/modern +stdout: +Modern C++: 202302 +./c++.at:569: $PREPARSER ./list stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57b67a00->Object::Object { } -Next token is token 'a' (0x57b67a00 'a') -Shifting token 'a' (0x57b67a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a00 'a') --> $$ = nterm item (0x57b67a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x57b67a30->Object::Object { 0x57b67a00 } -Next token is token 'a' (0x57b67a30 'a') -Shifting token 'a' (0x57b67a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a30 'a') --> $$ = nterm item (0x57b67a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x57b67a60->Object::Object { 0x57b67a00, 0x57b67a30 } -Next token is token 'a' (0x57b67a60 'a') -Shifting token 'a' (0x57b67a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a60 'a') --> $$ = nterm item (0x57b67a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x57b67a90->Object::Object { 0x57b67a00, 0x57b67a30, 0x57b67a60 } -Next token is token 'a' (0x57b67a90 'a') -Shifting token 'a' (0x57b67a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a90 'a') --> $$ = nterm item (0x57b67a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x57b67ac0->Object::Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90 } -Next token is token 'p' (0x57b67ac0 'p'Exception caught: cleaning lookahead and stack -0x57b67ac0->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90, 0x57b67ac0 } -0x57b67a90->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90 } -0x57b67a60->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60 } -0x57b67a30->Object::~Object { 0x57b67a00, 0x57b67a30 } -0x57b67a00->Object::~Object { 0x57b67a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:659: $PREPARSER ./input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57b67a00->Object::Object { } -Next token is token 'a' (0x57b67a00 'a') -Shifting token 'a' (0x57b67a00 'a') +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 Entering state 2 Stack now 0 2 
-Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a00 'a') --> $$ = nterm item (0x57b67a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x57b67a30->Object::Object { 0x57b67a00 } -Next token is token 'a' (0x57b67a30 'a') -Shifting token 'a' (0x57b67a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a30 'a') --> $$ = nterm item (0x57b67a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x57b67a60->Object::Object { 0x57b67a00, 0x57b67a30 } -Next token is token 'a' (0x57b67a60 'a') -Shifting token 'a' (0x57b67a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a60 'a') --> $$ = nterm item (0x57b67a60 'a') -Entering state 11 -Stack now 0 11 11 11 Reading a token -0x57b67a90->Object::Object { 0x57b67a00, 0x57b67a30, 0x57b67a60 } -Next token is token 'a' (0x57b67a90 'a') -Shifting token 'a' (0x57b67a90 'a') +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b67a90 'a') --> $$ = nterm item (0x57b67a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 +Stack now 0 2 Reading a token -0x57b67ac0->Object::Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90 } -Next token is token 'p' (0x57b67ac0 'p'Exception caught: cleaning lookahead and stack -0x57b67ac0->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90, 0x57b67ac0 } -0x57b67a90->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60, 0x57b67a90 } -0x57b67a60->Object::~Object { 0x57b67a00, 0x57b67a30, 0x57b67a60 } -0x57b67a30->Object::~Object { 0x57b67a00, 0x57b67a30 } -0x57b67a00->Object::~Object { 0x57b67a00 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: 
+./c++.at:849: $PREPARSER ./input stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:235: $PREPARSER ./list -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1064: $PREPARSER ./input < in +stdout: +./c++.at:855: $PREPARSER ./input +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern +./c++.at:570: $here/modern stdout: -Legac++ -./c++.at:567: $PREPARSER ./list +Modern C++: 202302 +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250750,18 +251410,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +669. c++.at:569: ok stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS + +stderr: stdout: -./c++.at:570: $here/modern +./c++.at:574: $here/modern stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS Legac++ -./c++.at:570: $PREPARSER ./list +./c++.at:574: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250784,48 +251444,71 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... 
+./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: +./c++.at:571: $here/modern +stdout: +Modern C++: 202302 +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:92: $PREPARSER ./input -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $PREPARSER ./input stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:849: $PREPARSER ./input stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:569: $here/modern +./c++.at:573: $here/modern stdout: -./c++.at:1065: $PREPARSER ./input < in -Legac++ -./c++.at:569: $PREPARSER ./list -stderr: +Modern C++: 201402 +./c++.at:573: $PREPARSER ./list stderr: -error: invalid character Destroy: "0" Destroy: "0" Destroy: 1 @@ -250847,22 +251530,30 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +671. 
c++.at:571: ok + +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +683. c++.at:1065: testing Syntax error as exception: glr.cc ... +./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:571: $here/modern +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: +670. c++.at:570: ok +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -Legac++ -./c++.at:571: $PREPARSER ./list -./c++.at:1361: $PREPARSER ./input aaaas -stderr: +./c++.at:572: $here/modern +stdout: +Modern C++: 201703 +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250885,160 +251576,89 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +684. c++.at:1066: testing Syntax error as exception: glr2.cc ... 
stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap +stdout: +./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:659: $PREPARSER ./input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57298a00->Object::Object { } -Next token is token 'a' (0x57298a00 'a') -Shifting token 'a' (0x57298a00 'a') +Next token is token NUMBER (1) +Shifting token NUMBER (1) Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a00 'a') --> $$ = nterm item (0x57298a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57298a30->Object::Object { 0x57298a00 } -Next token is token 'a' (0x57298a30 'a') -Shifting token 'a' (0x57298a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a30 'a') --> $$ = nterm item (0x57298a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57298a60->Object::Object { 0x57298a00, 0x57298a30 } -Next token is token 'a' (0x57298a60 'a') -Shifting token 'a' (0x57298a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a60 'a') --> $$ = nterm item (0x57298a60 'a') -Entering state 10 -Stack now 0 10 10 10 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 Reading a token -0x57298a90->Object::Object { 0x57298a00, 0x57298a30, 0x57298a60 } -Next token is token 'a' (0x57298a90 'a') -Shifting token 'a' (0x57298a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a90 'a') --> $$ = nterm item (0x57298a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 Reading a token -0x57298ac0->Object::Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90 } -Next token is token 'p' (0x57298ac0 'p'Exception caught: cleaning lookahead and stack -0x57298ac0->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90, 0x57298ac0 } -0x57298a90->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90 } -0x57298a60->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60 } -0x57298a30->Object::~Object { 0x57298a00, 0x57298a30 } -0x57298a00->Object::~Object { 0x57298a00 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc 
$LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57298a00->Object::Object { } -Next token is token 'a' (0x57298a00 'a') -Shifting token 'a' (0x57298a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a00 'a') --> $$ = nterm item (0x57298a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57298a30->Object::Object { 0x57298a00 } -Next token is token 'a' (0x57298a30 'a') -Shifting token 'a' (0x57298a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a30 'a') --> $$ = nterm item (0x57298a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57298a60->Object::Object { 0x57298a00, 0x57298a30 } -Next token is token 'a' (0x57298a60 'a') -Shifting token 'a' (0x57298a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a60 'a') --> $$ = nterm item (0x57298a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x57298a90->Object::Object { 0x57298a00, 0x57298a30, 0x57298a60 } -Next token is token 'a' (0x57298a90 'a') -Shifting token 'a' (0x57298a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57298a90 'a') --> $$ = nterm item (0x57298a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x57298ac0->Object::Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90 } -Next token is token 'p' (0x57298ac0 'p'Exception caught: cleaning lookahead and stack -0x57298ac0->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90, 0x57298ac0 } -0x57298a90->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60, 0x57298a90 } -0x57298a60->Object::~Object { 0x57298a00, 0x57298a30, 0x57298a60 } -0x57298a30->Object::~Object { 0x57298a00, 0x57298a30 } -0x57298a00->Object::~Object { 0x57298a00 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE +stdout: +./c++.at:849: $PREPARSER ./input stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR +stdout: +./c++.at:941: $PREPARSER ./input stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:855: $PREPARSER ./input @@ -251046,7 +251666,12 @@ ./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in stderr: stdout: ./c++.at:849: $PREPARSER ./input @@ -251055,6 +251680,22 @@ ======== Testing with C++ standard flags: '' ./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: stdout: ./c++.at:941: $PREPARSER ./input stderr: @@ -251067,10 +251708,201 @@ ./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Modern C++: 201103 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:659: $PREPARSER ./input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 
5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:573: $here/modern +stdout: +Modern C++: 201703 +./c++.at:573: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +stderr: +stdout: ./c++.at:572: $here/modern +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Legac++ +Modern C++: 202002 ./c++.at:572: $PREPARSER ./list +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: Destroy: "0" Destroy: "0" @@ -251096,13 +251928,141 @@ ./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:849: $PREPARSER ./input +stderr: +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Modern C++: 201402 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1066: $PREPARSER ./input < in +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:659: $PREPARSER ./input @@ -251152,14 +252112,88 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +675. c++.at:584: ok +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS + +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:573: $here/modern stdout: -Legac++ +Modern C++: 202002 ./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" @@ -251186,16 +252220,162 @@ ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +685. c++.at:1360: testing Exception safety with error recovery ... +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 stderr: stdout: -./c++.at:566: $here/modern +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -Modern C++: 201103 -./c++.at:566: $PREPARSER ./list +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:1360: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +======== Testing with C++ standard flags: '' +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:574: $here/modern +./c++.at:1066: $PREPARSER ./input < in +stdout: +Modern C++: 201703 +./c++.at:574: $PREPARSER ./list +stderr: +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +error: invalid character +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +stdout: +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:572: $here/modern +stdout: +Modern C++: 202302 +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251218,18 +252398,196 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +672. c++.at:572: ok + +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +681. 
c++.at:884: ok + +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +stderr: +error: invalid character +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +686. c++.at:1361: testing Exception safety without error recovery ... 
+./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1362: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +======== Testing with C++ standard flags: '' +./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1361: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' stdout: ./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -251247,57 +252605,57 @@ Entering state 0 Stack now 0 Reading a 
token -0x57299a00->Object::Object { } -Next token is token 'a' (0x57299a00 'a') -Shifting token 'a' (0x57299a00 'a') +0x56e80a00->Object::Object { } +Next token is token 'a' (0x56e80a00 'a') +Shifting token 'a' (0x56e80a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a00 'a') --> $$ = nterm item (0x57299a00 'a') + $1 = token 'a' (0x56e80a00 'a') +-> $$ = nterm item (0x56e80a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57299a30->Object::Object { 0x57299a00 } -Next token is token 'a' (0x57299a30 'a') -Shifting token 'a' (0x57299a30 'a') +0x56e80a30->Object::Object { 0x56e80a00 } +Next token is token 'a' (0x56e80a30 'a') +Shifting token 'a' (0x56e80a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a30 'a') --> $$ = nterm item (0x57299a30 'a') + $1 = token 'a' (0x56e80a30 'a') +-> $$ = nterm item (0x56e80a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57299a60->Object::Object { 0x57299a00, 0x57299a30 } -Next token is token 'a' (0x57299a60 'a') -Shifting token 'a' (0x57299a60 'a') +0x56e80a60->Object::Object { 0x56e80a00, 0x56e80a30 } +Next token is token 'a' (0x56e80a60 'a') +Shifting token 'a' (0x56e80a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a60 'a') --> $$ = nterm item (0x57299a60 'a') + $1 = token 'a' (0x56e80a60 'a') +-> $$ = nterm item (0x56e80a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57299a90->Object::Object { 0x57299a00, 0x57299a30, 0x57299a60 } -Next token is token 'a' (0x57299a90 'a') -Shifting token 'a' (0x57299a90 'a') +0x56e80a90->Object::Object { 0x56e80a00, 0x56e80a30, 0x56e80a60 } +Next token is token 'a' (0x56e80a90 'a') +Shifting token 'a' (0x56e80a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a90 'a') --> $$ = nterm item (0x57299a90 'a') + $1 = token 'a' (0x56e80a90 'a') +-> $$ = nterm item (0x56e80a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57299ac0->Object::Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90 } -Next token is token 'p' (0x57299ac0 'p'Exception caught: cleaning lookahead and stack -0x57299ac0->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90, 0x57299ac0 } -0x57299a90->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90 } -0x57299a60->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60 } -0x57299a30->Object::~Object { 0x57299a00, 0x57299a30 } -0x57299a00->Object::~Object { 0x57299a00 } +0x56e80ac0->Object::Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90 } +Next token is token 'p' (0x56e80ac0 'p'Exception caught: cleaning lookahead and stack +0x56e80ac0->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90, 0x56e80ac0 } +0x56e80a90->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90 } +0x56e80a60->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60 } +0x56e80a30->Object::~Object { 0x56e80a00, 0x56e80a30 } +0x56e80a00->Object::~Object { 0x56e80a00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -251306,57 +252664,57 @@ Entering state 0 Stack now 0 Reading a token -0x57299a00->Object::Object { } -Next token is token 'a' (0x57299a00 'a') -Shifting token 'a' (0x57299a00 'a') +0x56e80a00->Object::Object { } +Next token is token 'a' (0x56e80a00 'a') +Shifting token 'a' (0x56e80a00 'a') Entering state 2 Stack now 0 
2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a00 'a') --> $$ = nterm item (0x57299a00 'a') + $1 = token 'a' (0x56e80a00 'a') +-> $$ = nterm item (0x56e80a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57299a30->Object::Object { 0x57299a00 } -Next token is token 'a' (0x57299a30 'a') -Shifting token 'a' (0x57299a30 'a') +0x56e80a30->Object::Object { 0x56e80a00 } +Next token is token 'a' (0x56e80a30 'a') +Shifting token 'a' (0x56e80a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a30 'a') --> $$ = nterm item (0x57299a30 'a') + $1 = token 'a' (0x56e80a30 'a') +-> $$ = nterm item (0x56e80a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57299a60->Object::Object { 0x57299a00, 0x57299a30 } -Next token is token 'a' (0x57299a60 'a') -Shifting token 'a' (0x57299a60 'a') +0x56e80a60->Object::Object { 0x56e80a00, 0x56e80a30 } +Next token is token 'a' (0x56e80a60 'a') +Shifting token 'a' (0x56e80a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a60 'a') --> $$ = nterm item (0x57299a60 'a') + $1 = token 'a' (0x56e80a60 'a') +-> $$ = nterm item (0x56e80a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57299a90->Object::Object { 0x57299a00, 0x57299a30, 0x57299a60 } -Next token is token 'a' (0x57299a90 'a') -Shifting token 'a' (0x57299a90 'a') +0x56e80a90->Object::Object { 0x56e80a00, 0x56e80a30, 0x56e80a60 } +Next token is token 'a' (0x56e80a90 'a') +Shifting token 'a' (0x56e80a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57299a90 'a') --> $$ = nterm item (0x57299a90 'a') + $1 = token 'a' (0x56e80a90 'a') +-> $$ = nterm item (0x56e80a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57299ac0->Object::Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90 } -Next token is token 'p' (0x57299ac0 'p'Exception caught: cleaning lookahead and stack -0x57299ac0->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90, 0x57299ac0 } -0x57299a90->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60, 0x57299a90 } -0x57299a60->Object::~Object { 0x57299a00, 0x57299a30, 0x57299a60 } -0x57299a30->Object::~Object { 0x57299a00, 0x57299a30 } -0x57299a00->Object::~Object { 0x57299a00 } +0x56e80ac0->Object::Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90 } +Next token is token 'p' (0x56e80ac0 'p'Exception caught: cleaning lookahead and stack +0x56e80ac0->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90, 0x56e80ac0 } +0x56e80a90->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60, 0x56e80a90 } +0x56e80a60->Object::~Object { 0x56e80a00, 0x56e80a30, 0x56e80a60 } +0x56e80a30->Object::~Object { 0x56e80a00, 0x56e80a30 } +0x56e80a00->Object::~Object { 0x56e80a00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr @@ -251382,9 +252740,41 @@ stdout: ./c++.at:574: $here/modern stdout: -Legac++ +Modern C++: 202002 ./c++.at:574: $PREPARSER ./list stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:573: $here/modern +stdout: +Modern C++: 202302 +./c++.at:573: $PREPARSER ./list +stderr: Destroy: "0" Destroy: "0" Destroy: 1 @@ -251406,9 +252796,592 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x57c70a00->Object::Object { } +Next token is token 'a' (0x57c70a00 'a') +Shifting token 'a' (0x57c70a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a00 'a') +-> $$ = nterm item (0x57c70a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x57c70a30->Object::Object { 0x57c70a00 } +Next token is token 'a' (0x57c70a30 'a') +Shifting token 'a' (0x57c70a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a30 'a') +-> $$ = nterm item (0x57c70a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x57c70a60->Object::Object { 0x57c70a00, 0x57c70a30 } +Next token is token 'a' (0x57c70a60 'a') +Shifting token 'a' (0x57c70a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a60 'a') +-> $$ = nterm item (0x57c70a60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x57c70a90->Object::Object { 0x57c70a00, 0x57c70a30, 0x57c70a60 } +Next token is token 'a' (0x57c70a90 'a') +Shifting token 'a' (0x57c70a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a90 'a') +-> $$ = nterm item (0x57c70a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x57c70ac0->Object::Object { 0x57c70a00, 0x57c70a30, 0x57c70a60, 0x57c70a90 } +Next token is token 'p' (0x57c70ac0 'p'Exception caught: cleaning lookahead and stack +0x57c70ac0->Object::~Object { 0x57c70a00, 0x57c70a30, 
0x57c70a60, 0x57c70a90, 0x57c70ac0 } +0x57c70a90->Object::~Object { 0x57c70a00, 0x57c70a30, 0x57c70a60, 0x57c70a90 } +0x57c70a60->Object::~Object { 0x57c70a00, 0x57c70a30, 0x57c70a60 } +0x57c70a30->Object::~Object { 0x57c70a00, 0x57c70a30 } +0x57c70a00->Object::~Object { 0x57c70a00 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x57c70a00->Object::Object { } +Next token is token 'a' (0x57c70a00 'a') +Shifting token 'a' (0x57c70a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a00 'a') +-> $$ = nterm item (0x57c70a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x57c70a30->Object::Object { 0x57c70a00 } +Next token is token 'a' (0x57c70a30 'a') +Shifting token 'a' (0x57c70a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a30 'a') +-> $$ = nterm item (0x57c70a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x57c70a60->Object::Object { 0x57c70a00, 0x57c70a30 } +Next token is token 'a' (0x57c70a60 'a') +Shifting token 'a' (0x57c70a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a60 'a') +-> $$ = nterm item (0x57c70a60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x57c70a90->Object::Object { 0x57c70a00, 0x57c70a30, 0x57c70a60 } +Next token is token 'a' (0x57c70a90 'a') +Shifting token 'a' (0x57c70a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57c70a90 'a') +-> $$ = nterm item (0x57c70a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x57c70ac0->Object::Object { 0x57c70a00, 0x57c70a30, 0x57c70a60, 0x57c70a90 } +Next token is token 'p' (0x57c70ac0 'p'Exception caught: cleaning lookahead and stack +0x57c70ac0->Object::~Object { 0x57c70a00, 0x57c70a30, 0x57c70a60, 0x57c70a90, 0x57c70ac0 } +0x57c70a90->Object::~Object { 0x57c70a00, 0x57c70a30, 0x57c70a60, 0x57c70a90 } +0x57c70a60->Object::~Object { 0x57c70a00, 0x57c70a30, 0x57c70a60 } +0x57c70a30->Object::~Object { 0x57c70a00, 0x57c70a30 } +0x57c70a00->Object::~Object { 0x57c70a00 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stdout: +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: 
'' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +stdout: +stderr: +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffbe0aaf->Object::Object { } +0xffbe0b58->Object::Object { 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433c4->Object::Object { 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 2 +0xffbe0b68->Object::Object { 0x568433c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433c4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433c4->Object::~Object { 0x568433c4, 0xffbe0b68 } +0x568433c4->Object::Object { 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4 } +0xffbe0b58->Object::Object { 0x568433c4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433d4->Object::Object { 0x568433c4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433d4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433d4->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +0x568433d4->Object::Object { 0x568433c4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433e4->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 11 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433e4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433e4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b68 } +0x568433e4->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 
0x568433d4, 0x568433e4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433f4->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433f4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433f4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b68 } +0x568433f4->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'p' (0xffbe0b58 'p'Exception caught: cleaning lookahead and stack +0x568433f4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b58 } +0x568433e4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b58 } +0x568433d4->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b58 } +0x568433c4->Object::~Object { 0x568433c4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0xffbe0b58 } +exception caught: printer +end { } +./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffbe0aaf->Object::Object { } +0xffbe0b58->Object::Object { 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433c4->Object::Object { 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 2 +0xffbe0b68->Object::Object { 0x568433c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433c4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433c4->Object::~Object { 0x568433c4, 0xffbe0b68 } +0x568433c4->Object::Object { 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4 } +0xffbe0b58->Object::Object { 0x568433c4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433d4->Object::Object { 0x568433c4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aa8 } +Entering state 2 
+Stack now 0 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433d4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433d4->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +0x568433d4->Object::Object { 0x568433c4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433e4->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 11 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433e4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433e4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b68 } +0x568433e4->Object::Object { 0x568433c4, 0x568433d4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'a' (0xffbe0b58 'a') +0xffbe0aa8->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8, 0xffbe0b58 } +Shifting token 'a' (0xffbe0aa8 'a') +0x568433f4->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0aa8 } +0xffbe0aa8->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aa8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffbe0b68->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x568433f4 'a') +-> $$ = nterm item (0xffbe0b68 'a') +0x568433f4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b68 } +0x568433f4->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b68 } +0xffbe0b68->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b68 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xffbe0aaf->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4 } +0xffbe0b58->Object::Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aaf } +0xffbe0aaf->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0aaf, 0xffbe0b58 } +Next token is token 'p' (0xffbe0b58 'p'Exception caught: cleaning lookahead and stack +0x568433f4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0x568433f4, 0xffbe0b58 } +0x568433e4->Object::~Object { 0x568433c4, 0x568433d4, 0x568433e4, 0xffbe0b58 } +0x568433d4->Object::~Object { 0x568433c4, 0x568433d4, 0xffbe0b58 } +0x568433c4->Object::~Object { 0x568433c4, 0xffbe0b58 } +0xffbe0b58->Object::~Object { 0xffbe0b58 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr +stderr: +exception caught: yylex +stdout: 
+exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error +./c++.at:1360: $PREPARSER ./input i +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +./c++.at:1360: $PREPARSER ./input aaaap +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56e0ba00->Object::Object { } +Next token is token 'a' (0x56e0ba00 'a') +Shifting token 'a' (0x56e0ba00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba00 'a') +-> $$ = nterm item (0x56e0ba00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56e0ba30->Object::Object { 0x56e0ba00 } +Next token is token 'a' (0x56e0ba30 'a') +Shifting token 'a' (0x56e0ba30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba30 'a') +-> $$ = nterm item (0x56e0ba30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56e0ba60->Object::Object { 0x56e0ba00, 0x56e0ba30 } +Next token is token 'a' (0x56e0ba60 'a') +Shifting token 'a' (0x56e0ba60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba60 'a') +-> $$ = nterm item (0x56e0ba60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56e0ba90->Object::Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60 } +Next token is token 'a' (0x56e0ba90 'a') +Shifting token 'a' (0x56e0ba90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba90 'a') +-> $$ = nterm item (0x56e0ba90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56e0bac0->Object::Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90 } +Next token is token 'p' (0x56e0bac0 'p'Exception caught: cleaning lookahead and stack +0x56e0bac0->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90, 0x56e0bac0 } +0x56e0ba90->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90 } +0x56e0ba60->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60 } +0x56e0ba30->Object::~Object { 0x56e0ba00, 0x56e0ba30 } +0x56e0ba00->Object::~Object { 0x56e0ba00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: $PREPARSER ./input aaaaR +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56e0ba00->Object::Object { } +Next token is token 'a' (0x56e0ba00 'a') +Shifting token 'a' (0x56e0ba00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba00 'a') +-> $$ = nterm item (0x56e0ba00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56e0ba30->Object::Object { 0x56e0ba00 } +Next token is token 'a' (0x56e0ba30 'a') +Shifting token 'a' (0x56e0ba30 'a') +Entering state 2 +Stack now 0 11 2 
+Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba30 'a') +-> $$ = nterm item (0x56e0ba30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56e0ba60->Object::Object { 0x56e0ba00, 0x56e0ba30 } +Next token is token 'a' (0x56e0ba60 'a') +Shifting token 'a' (0x56e0ba60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba60 'a') +-> $$ = nterm item (0x56e0ba60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56e0ba90->Object::Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60 } +Next token is token 'a' (0x56e0ba90 'a') +Shifting token 'a' (0x56e0ba90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56e0ba90 'a') +-> $$ = nterm item (0x56e0ba90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56e0bac0->Object::Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90 } +Next token is token 'p' (0x56e0bac0 'p'Exception caught: cleaning lookahead and stack +0x56e0bac0->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90, 0x56e0bac0 } +0x56e0ba90->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60, 0x56e0ba90 } +0x56e0ba60->Object::~Object { 0x56e0ba00, 0x56e0ba30, 0x56e0ba60 } +0x56e0ba30->Object::~Object { 0x56e0ba00, 0x56e0ba30 } +0x56e0ba00->Object::~Object { 0x56e0ba00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stderr: +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +673. c++.at:573: ok + +688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... 
+./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:1066: $PREPARSER ./input < in @@ -251418,8 +253391,16 @@ error: invalid character caught error ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1066: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' stderr: +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS error: invalid expression ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1066: $PREPARSER ./input < in @@ -251428,7 +253409,6 @@ ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stdout: ./c++.at:1064: $PREPARSER ./input < in stderr: @@ -251439,520 +253419,1069 @@ ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1064: $PREPARSER ./input < in stderr: -stderr: error: invalid expression -stdout: ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: ./c++.at:1064: $PREPARSER ./input < in stderr: error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +======== Testing with C++ standard flags: '' ./c++.at:1066: ./check +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:235: $PREPARSER ./list +./c++.at:1363: ./exceptions || exit 77 stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Inner caught +Outer caught +./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -stderr: -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
-stdout: +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1065: $PREPARSER ./input < in -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stderr: -stdout: stdout: -./c++.at:568: $here/modern ./c++.at:1361: $PREPARSER ./input aaaas -stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Modern C++: 201103 -./c++.at:568: $PREPARSER ./list exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaal stderr: stderr: -stdout: exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./c++.at:1361: $PREPARSER ./input i +./c++.at:1360: $PREPARSER ./input aaaas stderr: +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1360: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaap stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x56f74a00->Object::Object { } -Next token is token 'a' (0x56f74a00 'a') -Shifting token 'a' (0x56f74a00 'a') +0x56faea00->Object::Object { } +Next token is token 'a' (0x56faea00 'a') +Shifting token 'a' (0x56faea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a00 'a') --> $$ = nterm item (0x56f74a00 'a') + $1 = token 'a' (0x56faea00 'a') +-> $$ = nterm item (0x56faea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x56f74a30->Object::Object { 0x56f74a00 } -Next token is token 'a' (0x56f74a30 'a') -Shifting token 'a' (0x56f74a30 'a') +0x56faea30->Object::Object { 0x56faea00 } +Next token is token 'a' (0x56faea30 'a') +Shifting token 'a' (0x56faea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a30 'a') --> $$ = nterm item (0x56f74a30 'a') + $1 = token 'a' (0x56faea30 'a') +-> $$ = nterm item (0x56faea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x56f74a60->Object::Object { 0x56f74a00, 0x56f74a30 } -Next token is token 'a' (0x56f74a60 'a') -Shifting token 'a' (0x56f74a60 'a') +0x56faea60->Object::Object { 0x56faea00, 0x56faea30 } +Next token is token 'a' (0x56faea60 'a') +Shifting token 'a' (0x56faea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a60 'a') --> $$ = nterm item (0x56f74a60 'a') + $1 = token 'a' (0x56faea60 'a') +-> $$ = nterm item (0x56faea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x56f74a90->Object::Object { 0x56f74a00, 0x56f74a30, 0x56f74a60 } -Next token is token 'a' (0x56f74a90 'a') -Shifting token 'a' (0x56f74a90 'a') +0x56faea90->Object::Object { 0x56faea00, 0x56faea30, 0x56faea60 } +Next token is token 'a' (0x56faea90 'a') +Shifting token 'a' (0x56faea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a90 'a') --> $$ = nterm item (0x56f74a90 'a') + $1 = token 'a' (0x56faea90 'a') +-> $$ = nterm item (0x56faea90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x56f74ac0->Object::Object { 0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90 } -Next token is token 'p' (0x56f74ac0 'p'Exception caught: cleaning lookahead and stack -0x56f74ac0->Object::~Object { 0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90, 0x56f74ac0 } -0x56f74a90->Object::~Object { 
0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90 } -0x56f74a60->Object::~Object { 0x56f74a00, 0x56f74a30, 0x56f74a60 } -0x56f74a30->Object::~Object { 0x56f74a00, 0x56f74a30 } -0x56f74a00->Object::~Object { 0x56f74a00 } +0x56faeac0->Object::Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90 } +Next token is token 'p' (0x56faeac0 'p'Exception caught: cleaning lookahead and stack +0x56faeac0->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90, 0x56faeac0 } +0x56faea90->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90 } +0x56faea60->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60 } +0x56faea30->Object::~Object { 0x56faea00, 0x56faea30 } +0x56faea00->Object::~Object { 0x56faea00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1360: $PREPARSER ./input aaaap Starting parse Entering state 0 Stack now 0 Reading a token -0x56f74a00->Object::Object { } -Next token is token 'a' (0x56f74a00 'a') -Shifting token 'a' (0x56f74a00 'a') +0x56faea00->Object::Object { } +Next token is token 'a' (0x56faea00 'a') +Shifting token 'a' (0x56faea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a00 'a') --> $$ = nterm item (0x56f74a00 'a') + $1 = token 'a' (0x56faea00 'a') +-> $$ = nterm item (0x56faea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x56f74a30->Object::Object { 0x56f74a00 } -Next token is token 'a' (0x56f74a30 'a') -Shifting token 'a' (0x56f74a30 'a') +0x56faea30->Object::Object { 0x56faea00 } +Next token is token 'a' (0x56faea30 'a') +Shifting token 'a' (0x56faea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a30 'a') --> $$ = nterm item (0x56f74a30 'a') + $1 = token 'a' (0x56faea30 'a') +-> $$ = nterm item (0x56faea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x56f74a60->Object::Object { 0x56f74a00, 0x56f74a30 } -Next token is token 'a' (0x56f74a60 'a') -Shifting token 'a' (0x56f74a60 'a') +0x56faea60->Object::Object { 0x56faea00, 0x56faea30 } +Next token is token 'a' (0x56faea60 'a') +Shifting token 'a' (0x56faea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a60 'a') --> $$ = nterm item (0x56f74a60 'a') + $1 = token 'a' (0x56faea60 'a') +-> $$ = nterm item (0x56faea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x56f74a90->Object::Object { 0x56f74a00, 0x56f74a30, 0x56f74a60 } -Next token is token 'a' (0x56f74a90 'a') -Shifting token 'a' (0x56f74a90 'a') +0x56faea90->Object::Object { 0x56faea00, 0x56faea30, 0x56faea60 } +Next token is token 'a' (0x56faea90 'a') +Shifting token 'a' (0x56faea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f74a90 'a') --> $$ = nterm item (0x56f74a90 'a') + $1 = token 'a' (0x56faea90 'a') +-> $$ = nterm item (0x56faea90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x56f74ac0->Object::Object { 0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90 } -Next token is token 'p' (0x56f74ac0 'p'Exception caught: cleaning lookahead and stack -0x56f74ac0->Object::~Object { 0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90, 0x56f74ac0 } -0x56f74a90->Object::~Object { 0x56f74a00, 0x56f74a30, 0x56f74a60, 0x56f74a90 } -0x56f74a60->Object::~Object { 0x56f74a00, 0x56f74a30, 0x56f74a60 } -0x56f74a30->Object::~Object { 0x56f74a00, 0x56f74a30 } -0x56f74a00->Object::~Object { 0x56f74a00 } 
+0x56faeac0->Object::Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90 } +Next token is token 'p' (0x56faeac0 'p'Exception caught: cleaning lookahead and stack +0x56faeac0->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90, 0x56faeac0 } +0x56faea90->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60, 0x56faea90 } +0x56faea60->Object::~Object { 0x56faea00, 0x56faea30, 0x56faea60 } +0x56faea30->Object::~Object { 0x56faea00, 0x56faea30 } +0x56faea00->Object::~Object { 0x56faea00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x57160a00->Object::Object { } +Next token is token 'a' (0x57160a00 'a') +Shifting token 'a' (0x57160a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a00 'a') +-> $$ = nterm item (0x57160a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x57160a30->Object::Object { 0x57160a00 } +Next token is token 'a' (0x57160a30 'a') +Shifting token 'a' (0x57160a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a30 'a') +-> $$ = nterm item (0x57160a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x57160a60->Object::Object { 0x57160a00, 0x57160a30 } +Next token is token 'a' (0x57160a60 'a') +Shifting token 'a' (0x57160a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a60 'a') +-> $$ = nterm item (0x57160a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x57160a90->Object::Object { 0x57160a00, 0x57160a30, 0x57160a60 } +Next token is token 'a' (0x57160a90 'a') +Shifting token 'a' (0x57160a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a90 'a') +-> $$ = nterm item (0x57160a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x57160ac0->Object::Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90 } +Next token is token 'p' (0x57160ac0 'p'Exception caught: cleaning lookahead and stack +0x57160ac0->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90, 0x57160ac0 } +0x57160a90->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90 } +0x57160a60->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60 } +0x57160a30->Object::~Object { 0x57160a00, 0x57160a30 } +0x57160a00->Object::~Object { 0x57160a00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1361: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x57160a00->Object::Object { } +Next token is token 'a' (0x57160a00 'a') +Shifting token 'a' (0x57160a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a00 'a') +-> $$ = nterm item (0x57160a00 'a') 
+Entering state 11 +Stack now 0 11 +Reading a token +0x57160a30->Object::Object { 0x57160a00 } +Next token is token 'a' (0x57160a30 'a') +Shifting token 'a' (0x57160a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a30 'a') +-> $$ = nterm item (0x57160a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x57160a60->Object::Object { 0x57160a00, 0x57160a30 } +Next token is token 'a' (0x57160a60 'a') +Shifting token 'a' (0x57160a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a60 'a') +-> $$ = nterm item (0x57160a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x57160a90->Object::Object { 0x57160a00, 0x57160a30, 0x57160a60 } +Next token is token 'a' (0x57160a90 'a') +Shifting token 'a' (0x57160a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57160a90 'a') +-> $$ = nterm item (0x57160a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x57160ac0->Object::Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90 } +Next token is token 'p' (0x57160ac0 'p'Exception caught: cleaning lookahead and stack +0x57160ac0->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90, 0x57160ac0 } +0x57160a90->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60, 0x57160a90 } +0x57160a60->Object::~Object { 0x57160a00, 0x57160a30, 0x57160a60 } +0x57160a30->Object::~Object { 0x57160a00, 0x57160a30 } +0x57160a00->Object::~Object { 0x57160a00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae ./c++.at:1361: $PREPARSER ./input aaaaT stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaR stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:1362: $PREPARSER ./input aaaas stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:1360: $PREPARSER ./input aaaaT +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaap stderr: stdout: -./c++.at:567: $here/modern -stdout: -Modern C++: 201103 -./c++.at:567: $PREPARSER ./list +./c++.at:857: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: $PREPARSER ./input --debug aaaap +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffec0e1f->Object::Object { } +0xffec0ec8->Object::Object { 0xffec0e1f } +0xffec0e1f->Object::~Object { 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0xffec0ec8 } +0xffec0dbb->Object::Object { 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3c4->Object::Object { 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0xffec0e30 } +Entering state 2 +Stack now 0 2 +0xffec0ed8->Object::Object { 0x56e4a3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3c4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3c4->Object::~Object { 0x56e4a3c4, 0xffec0ed8 } +0x56e4a3c4->Object::Object { 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3d4->Object::Object { 0x56e4a3c4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3d4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3d4->Object::~Object { 0x56e4a3c4, 
0x56e4a3d4, 0xffec0ed8 } +0x56e4a3d4->Object::Object { 0x56e4a3c4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3e4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3e4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3e4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0x56e4a3e4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3f4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3f4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3f4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +0x56e4a3f4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 
0x56e4a3f4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'p' (0xffec0ec8 'p'Exception caught: cleaning lookahead and stack +0x56e4a3f4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ec8 } +0x56e4a3e4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ec8 } +0x56e4a3d4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ec8 } +0x56e4a3c4->Object::~Object { 0x56e4a3c4, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0xffec0ec8 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:851: $PREPARSER ./input stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffec0e1f->Object::Object { } +0xffec0ec8->Object::Object { 0xffec0e1f } +0xffec0e1f->Object::~Object { 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0xffec0ec8 } +0xffec0dbb->Object::Object { 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3c4->Object::Object { 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0xffec0e30 } +Entering state 2 +Stack now 0 2 +0xffec0ed8->Object::Object { 0x56e4a3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3c4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3c4->Object::~Object { 0x56e4a3c4, 0xffec0ed8 } +0x56e4a3c4->Object::Object { 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3d4->Object::Object { 0x56e4a3c4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4 } +Reducing stack by 
rule 4 (line 142): + $1 = token 'a' (0x56e4a3d4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3d4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +0x56e4a3d4->Object::Object { 0x56e4a3c4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3e4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3e4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3e4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0x56e4a3e4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'a' (0xffec0ec8 'a') +0xffec0e30->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ec8 } +0xffec0dbb->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30, 0xffec0ec8 } +0xffec0dbb->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0dbb, 0xffec0e30, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30, 0xffec0ec8 } +Shifting token 'a' (0xffec0e30 'a') +0x56e4a3f4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0e30 } +0xffec0dbf->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e30 } +0xffec0dbf->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0dbf, 0xffec0e30 } +0xffec0e30->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e30 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffec0ed8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56e4a3f4 'a') +-> $$ = nterm item (0xffec0ed8 'a') +0x56e4a3f4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +0x56e4a3f4->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ed8 } +0xffec0e1f->Object::Object { 
0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e1f, 0xffec0ed8 } +0xffec0ed8->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ed8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xffec0e1f->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4 } +0xffec0ec8->Object::Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e1f } +0xffec0e1f->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0e1f, 0xffec0ec8 } +Next token is token 'p' (0xffec0ec8 'p'Exception caught: cleaning lookahead and stack +0x56e4a3f4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0x56e4a3f4, 0xffec0ec8 } +0x56e4a3e4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0x56e4a3e4, 0xffec0ec8 } +0x56e4a3d4->Object::~Object { 0x56e4a3c4, 0x56e4a3d4, 0xffec0ec8 } +0x56e4a3c4->Object::~Object { 0x56e4a3c4, 0xffec0ec8 } +0xffec0ec8->Object::~Object { 0xffec0ec8 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -./c++.at:849: $PREPARSER ./input +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stdout: -Modern C++: 201103 -./c++.at:570: $PREPARSER ./list +./c++.at:857: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 
stderr: stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x58340a00->Object::Object { } +Next token is token 'a' (0x58340a00 'a') +Shifting token 'a' (0x58340a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a00 'a') +-> $$ = nterm item (0x58340a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x58340a30->Object::Object { 0x58340a00 } +Next token is token 'a' (0x58340a30 'a') +Shifting token 'a' (0x58340a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a30 'a') +-> $$ = nterm item (0x58340a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x58340a60->Object::Object { 0x58340a00, 0x58340a30 } +Next token is token 'a' (0x58340a60 'a') +Shifting token 'a' (0x58340a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a60 'a') +-> $$ = nterm item (0x58340a60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x58340a90->Object::Object { 0x58340a00, 0x58340a30, 0x58340a60 } +Next token is token 'a' (0x58340a90 'a') +Shifting token 'a' (0x58340a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a90 'a') +-> $$ = nterm item (0x58340a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x58340ac0->Object::Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90 } +Next token is token 'p' (0x58340ac0 'p'Exception caught: cleaning lookahead and stack +0x58340ac0->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90, 0x58340ac0 } +0x58340a90->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90 } +0x58340a60->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60 } +0x58340a30->Object::~Object { 0x58340a00, 0x58340a30 } +0x58340a00->Object::~Object { 0x58340a00 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:659: $PREPARSER ./input stdout: -./existing.at:1460: $PREPARSER ./input +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x58340a00->Object::Object { } +Next token is token 'a' (0x58340a00 'a') +Shifting token 'a' (0x58340a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a00 'a') +-> $$ = nterm item (0x58340a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x58340a30->Object::Object { 0x58340a00 } +Next token is token 'a' (0x58340a30 'a') +Shifting token 'a' (0x58340a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a30 
'a') +-> $$ = nterm item (0x58340a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x58340a60->Object::Object { 0x58340a00, 0x58340a30 } +Next token is token 'a' (0x58340a60 'a') +Shifting token 'a' (0x58340a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a60 'a') +-> $$ = nterm item (0x58340a60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x58340a90->Object::Object { 0x58340a00, 0x58340a30, 0x58340a60 } +Next token is token 'a' (0x58340a90 'a') +Shifting token 'a' (0x58340a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x58340a90 'a') +-> $$ = nterm item (0x58340a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x58340ac0->Object::Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90 } +Next token is token 'p' (0x58340ac0 'p'Exception caught: cleaning lookahead and stack +0x58340ac0->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90, 0x58340ac0 } +0x58340a90->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60, 0x58340a90 } +0x58340a60->Object::~Object { 0x58340a00, 0x58340a30, 0x58340a60 } +0x58340a30->Object::~Object { 0x58340a00, 0x58340a30 } +0x58340a00->Object::~Object { 0x58340a00 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +stdout: +./c++.at:1363: $PREPARSER ./input aaaal +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stderr: +stderr: +exception caught: yylex +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stdout: +stderr: +./c++.at:857: $PREPARSER ./input +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffc5c1ef->Object::Object { } +0xffc5c298->Object::Object { 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0xffc5c298 } +0xffc5c298->Object::~Object { 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3c4->Object::Object { 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3c4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3c4->Object::~Object { 0x57d8a3c4, 0xffc5c2a8 } +0x57d8a3c4->Object::Object { 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 
0xffc5c2a8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3d4->Object::Object { 0x57d8a3c4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 0x57d8a3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3d4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3d4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +0x57d8a3d4->Object::Object { 0x57d8a3c4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3e4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3e4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3e4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +0x57d8a3e4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3f4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3f4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3f4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c2a8 } +0x57d8a3f4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1ef, 
0xffc5c298 } +Next token is token 'p' (0xffc5c298 'p'Exception caught: cleaning lookahead and stack +0x57d8a3f4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c298 } +0x57d8a3e4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c298 } +0x57d8a3d4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c298 } +0x57d8a3c4->Object::~Object { 0x57d8a3c4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0xffc5c298 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) +0xffc5c1ef->Object::Object { } +0xffc5c298->Object::Object { 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0xffc5c298 } +0xffc5c298->Object::~Object { 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3c4->Object::Object { 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0xffc5c1e8 } Entering state 1 Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 +0xffc5c2a8->Object::Object { 0x57d8a3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3c4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3c4->Object::~Object { 0x57d8a3c4, 0xffc5c2a8 } +0x57d8a3c4->Object::Object { 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 +0xffc5c1ef->Object::Object { 0x57d8a3c4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3d4->Object::Object { 0x57d8a3c4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 0x57d8a3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3d4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3d4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +0x57d8a3d4->Object::Object { 0x57d8a3c4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -624. 
existing.at:1460: ok -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201103 -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3e4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3e4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3e4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +0x57d8a3e4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'a' (0xffc5c298 'a') +0xffc5c1e8->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8, 0xffc5c298 } +Shifting token 'a' (0xffc5c1e8 'a') +0x57d8a3f4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c1e8 } +0xffc5c1e8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1e8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffc5c2a8->Object::Object { 0x57d8a3c4, 
0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57d8a3f4 'a') +-> $$ = nterm item (0xffc5c2a8 'a') +0x57d8a3f4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c2a8 } +0x57d8a3f4->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c2a8 } +0xffc5c2a8->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c2a8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffc5c1ef->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4 } +0xffc5c298->Object::Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1ef } +0xffc5c1ef->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c1ef, 0xffc5c298 } +Next token is token 'p' (0xffc5c298 'p'Exception caught: cleaning lookahead and stack +0x57d8a3f4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0x57d8a3f4, 0xffc5c298 } +0x57d8a3e4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0x57d8a3e4, 0xffc5c298 } +0x57d8a3d4->Object::~Object { 0x57d8a3c4, 0x57d8a3d4, 0xffc5c298 } +0x57d8a3c4->Object::~Object { 0x57d8a3c4, 0xffc5c298 } +0xffc5c298->Object::~Object { 0xffc5c298 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae stderr: -stdout: -./c++.at:569: $here/modern -stdout: -Modern C++: 201103 -./c++.at:569: $PREPARSER ./list +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT stderr: -stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201103 -./c++.at:572: $PREPARSER ./list +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc 
$LIBS -stderr: -stdout: +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./c++.at:1362: ./exceptions || exit 77 stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: -Inner caught -Outer caught -./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaal @@ -251965,8 +254494,6 @@ ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaap stderr: -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: @@ -251974,57 +254501,57 @@ Entering state 0 Stack now 0 Reading a token -0x57b0aa00->Object::Object { } -Next token is token 'a' (0x57b0aa00 'a') -Shifting token 'a' (0x57b0aa00 'a') +0x56a55a00->Object::Object { } +Next token is token 'a' (0x56a55a00 'a') +Shifting token 'a' (0x56a55a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa00 'a') --> $$ = nterm item (0x57b0aa00 'a') + $1 = token 'a' (0x56a55a00 'a') +-> $$ = nterm item (0x56a55a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57b0aa30->Object::Object { 0x57b0aa00 } -Next token is token 'a' (0x57b0aa30 'a') -Shifting token 'a' (0x57b0aa30 'a') +0x56a55a30->Object::Object { 0x56a55a00 } +Next token is token 'a' (0x56a55a30 'a') +Shifting token 'a' (0x56a55a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa30 'a') --> $$ = nterm item (0x57b0aa30 'a') + $1 = token 'a' (0x56a55a30 'a') +-> $$ = nterm item (0x56a55a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57b0aa60->Object::Object { 0x57b0aa00, 0x57b0aa30 } -Next token is token 'a' (0x57b0aa60 'a') -Shifting token 'a' (0x57b0aa60 'a') +0x56a55a60->Object::Object { 0x56a55a00, 0x56a55a30 } +Next token is token 'a' (0x56a55a60 'a') +Shifting token 'a' (0x56a55a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa60 'a') --> $$ = nterm item (0x57b0aa60 'a') + $1 = token 'a' (0x56a55a60 'a') +-> $$ = nterm item (0x56a55a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57b0aa90->Object::Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60 } -Next token is token 'a' (0x57b0aa90 'a') -Shifting token 'a' (0x57b0aa90 'a') +0x56a55a90->Object::Object { 0x56a55a00, 0x56a55a30, 0x56a55a60 } +Next token is token 'a' (0x56a55a90 'a') +Shifting token 'a' (0x56a55a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa90 'a') --> $$ = nterm item (0x57b0aa90 'a') + $1 = token 'a' (0x56a55a90 'a') +-> $$ = nterm item (0x56a55a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57b0aac0->Object::Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90 } -Next token is token 'p' (0x57b0aac0 'p'Exception caught: cleaning lookahead and stack -0x57b0aac0->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90, 0x57b0aac0 } -0x57b0aa90->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90 } -0x57b0aa60->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60 } 
-0x57b0aa30->Object::~Object { 0x57b0aa00, 0x57b0aa30 } -0x57b0aa00->Object::~Object { 0x57b0aa00 } +0x56a55ac0->Object::Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90 } +Next token is token 'p' (0x56a55ac0 'p'Exception caught: cleaning lookahead and stack +0x56a55ac0->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90, 0x56a55ac0 } +0x56a55a90->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90 } +0x56a55a60->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60 } +0x56a55a30->Object::~Object { 0x56a55a00, 0x56a55a30 } +0x56a55a00->Object::~Object { 0x56a55a00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252033,57 +254560,57 @@ Entering state 0 Stack now 0 Reading a token -0x57b0aa00->Object::Object { } -Next token is token 'a' (0x57b0aa00 'a') -Shifting token 'a' (0x57b0aa00 'a') +0x56a55a00->Object::Object { } +Next token is token 'a' (0x56a55a00 'a') +Shifting token 'a' (0x56a55a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa00 'a') --> $$ = nterm item (0x57b0aa00 'a') + $1 = token 'a' (0x56a55a00 'a') +-> $$ = nterm item (0x56a55a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57b0aa30->Object::Object { 0x57b0aa00 } -Next token is token 'a' (0x57b0aa30 'a') -Shifting token 'a' (0x57b0aa30 'a') +0x56a55a30->Object::Object { 0x56a55a00 } +Next token is token 'a' (0x56a55a30 'a') +Shifting token 'a' (0x56a55a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa30 'a') --> $$ = nterm item (0x57b0aa30 'a') + $1 = token 'a' (0x56a55a30 'a') +-> $$ = nterm item (0x56a55a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57b0aa60->Object::Object { 0x57b0aa00, 0x57b0aa30 } -Next token is token 'a' (0x57b0aa60 'a') -Shifting token 'a' (0x57b0aa60 'a') +0x56a55a60->Object::Object { 0x56a55a00, 0x56a55a30 } +Next token is token 'a' (0x56a55a60 'a') +Shifting token 'a' (0x56a55a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa60 'a') --> $$ = nterm item (0x57b0aa60 'a') + $1 = token 'a' (0x56a55a60 'a') +-> $$ = nterm item (0x56a55a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57b0aa90->Object::Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60 } -Next token is token 'a' (0x57b0aa90 'a') -Shifting token 'a' (0x57b0aa90 'a') +0x56a55a90->Object::Object { 0x56a55a00, 0x56a55a30, 0x56a55a60 } +Next token is token 'a' (0x56a55a90 'a') +Shifting token 'a' (0x56a55a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57b0aa90 'a') --> $$ = nterm item (0x57b0aa90 'a') + $1 = token 'a' (0x56a55a90 'a') +-> $$ = nterm item (0x56a55a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57b0aac0->Object::Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90 } -Next token is token 'p' (0x57b0aac0 'p'Exception caught: cleaning lookahead and stack -0x57b0aac0->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90, 0x57b0aac0 } -0x57b0aa90->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60, 0x57b0aa90 } -0x57b0aa60->Object::~Object { 0x57b0aa00, 0x57b0aa30, 0x57b0aa60 } -0x57b0aa30->Object::~Object { 0x57b0aa00, 0x57b0aa30 } -0x57b0aa00->Object::~Object { 0x57b0aa00 } +0x56a55ac0->Object::Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90 } +Next token is token 'p' (0x56a55ac0 'p'Exception caught: cleaning lookahead 
and stack +0x56a55ac0->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90, 0x56a55ac0 } +0x56a55a90->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60, 0x56a55a90 } +0x56a55a60->Object::~Object { 0x56a55a00, 0x56a55a30, 0x56a55a60 } +0x56a55a30->Object::~Object { 0x56a55a00, 0x56a55a30 } +0x56a55a00->Object::~Object { 0x56a55a00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr @@ -252097,52 +254624,361 @@ stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1360: $PREPARSER ./input aaaaT stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:566: $here/modern -./c++.at:856: $PREPARSER ./input -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 201402 -./c++.at:566: $PREPARSER ./list +./c++.at:1362: $PREPARSER ./input aaaas stderr: -Destroy: "0" -Destroy: "0" +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xfff6f32f->Object::Object { } +0xfff6f3d8->Object::Object { 0xfff6f32f } +0xfff6f32f->Object::~Object { 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713c4->Object::Object { 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0xfff6f340 } +Entering state 2 +Stack now 0 2 +0xfff6f3e8->Object::Object { 0x581713c4 } +Reducing stack by rule 4 
(line 142): + $1 = token 'a' (0x581713c4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713c4->Object::~Object { 0x581713c4, 0xfff6f3e8 } +0x581713c4->Object::Object { 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0x581713c4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713d4->Object::Object { 0x581713c4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713d4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713d4->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0x581713d4->Object::Object { 0x581713c4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713e4->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713e4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713e4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0x581713e4->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f } +0xfff6f32f->Object::~Object 
{ 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713f4->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 11 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713f4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713f4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +0x581713f4->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'p' (0xfff6f3d8 'p'Exception caught: cleaning lookahead and stack +0x581713f4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3d8 } +0x581713e4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3d8 } +0x581713d4->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3d8 } +0x581713c4->Object::~Object { 0x581713c4, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0xfff6f3d8 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xfff6f32f->Object::Object { } +0xfff6f3d8->Object::Object { 0xfff6f32f } +0xfff6f32f->Object::~Object { 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713c4->Object::Object { 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0xfff6f340 } +Entering state 2 +Stack now 0 2 +0xfff6f3e8->Object::Object { 0x581713c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713c4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713c4->Object::~Object { 0x581713c4, 0xfff6f3e8 } +0x581713c4->Object::Object { 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 
0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0x581713c4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713d4->Object::Object { 0x581713c4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713d4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713d4->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0x581713d4->Object::Object { 0x581713c4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340, 0xfff6f3d8 } +0xfff6f2cb->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713e4->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713e4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713e4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0x581713e4->Object::Object { 0x581713c4, 0x581713d4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'a' (0xfff6f3d8 'a') +0xfff6f340->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3d8 } +0xfff6f2cb->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340, 0xfff6f3d8 } 
+0xfff6f2cb->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f2cb, 0xfff6f340, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340, 0xfff6f3d8 } +Shifting token 'a' (0xfff6f340 'a') +0x581713f4->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f340 } +0xfff6f2cf->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f340 } +0xfff6f2cf->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f2cf, 0xfff6f340 } +0xfff6f340->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f340 } +Entering state 2 +Stack now 0 11 11 11 2 +0xfff6f3e8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x581713f4 'a') +-> $$ = nterm item (0xfff6f3e8 'a') +0x581713f4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +0x581713f4->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3e8 } +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f, 0xfff6f3e8 } +0xfff6f3e8->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3e8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xfff6f32f->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4 } +0xfff6f3d8->Object::Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f } +0xfff6f32f->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f32f, 0xfff6f3d8 } +Next token is token 'p' (0xfff6f3d8 'p'Exception caught: cleaning lookahead and stack +0x581713f4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0x581713f4, 0xfff6f3d8 } +0x581713e4->Object::~Object { 0x581713c4, 0x581713d4, 0x581713e4, 0xfff6f3d8 } +0x581713d4->Object::~Object { 0x581713c4, 0x581713d4, 0xfff6f3d8 } +0x581713c4->Object::~Object { 0x581713c4, 0xfff6f3d8 } +0xfff6f3d8->Object::~Object { 0xfff6f3d8 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +stderr: +stdout: +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $here/modern +./c++.at:1362: $PREPARSER ./input aaaaE +stdout: +Modern C++: 202302 +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +./c++.at:574: $PREPARSER ./list +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Destroy: "" +Destroy: "" Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: "" Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:1362: $PREPARSER ./input aaaaR +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER 
./input aaaaR -stderr: -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +stderr: +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: ./c++.at:1064: $PREPARSER ./input < in stderr: @@ -252151,38 +254987,45 @@ error: invalid character caught error ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1064: $PREPARSER ./input < in stderr: error: invalid expression ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1064: $PREPARSER ./input < in stderr: error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in stderr: -stdout: -./c++.at:850: $PREPARSER ./input +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./c++.at:851: $PREPARSER ./input stderr: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -./c++.at:235: $PREPARSER ./list stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: ./check ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +683. 
c++.at:1065: ok stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas @@ -252200,72 +255043,63 @@ ./c++.at:1361: $PREPARSER ./input aaaap stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./c++.at:1361: $PREPARSER ./input --debug aaaap -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x58410a00->Object::Object { } -Next token is token 'a' (0x58410a00 'a') -Shifting token 'a' (0x58410a00 'a') +0x57d33a00->Object::Object { } +Next token is token 'a' (0x57d33a00 'a') +Shifting token 'a' (0x57d33a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a00 'a') --> $$ = nterm item (0x58410a00 'a') + $1 = token 'a' (0x57d33a00 'a') +-> $$ = nterm item (0x57d33a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x58410a30->Object::Object { 0x58410a00 } -Next token is token 'a' (0x58410a30 'a') -Shifting token 'a' (0x58410a30 'a') +0x57d33a30->Object::Object { 0x57d33a00 } +Next token is token 'a' (0x57d33a30 'a') +Shifting token 'a' (0x57d33a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a30 'a') --> $$ = nterm item (0x58410a30 'a') + $1 = token 'a' (0x57d33a30 'a') +-> $$ = nterm item (0x57d33a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x58410a60->Object::Object { 0x58410a00, 0x58410a30 } -Next token is token 'a' (0x58410a60 'a') -Shifting token 'a' (0x58410a60 'a') +0x57d33a60->Object::Object { 0x57d33a00, 0x57d33a30 } +Next token is token 'a' (0x57d33a60 'a') +Shifting token 'a' (0x57d33a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a60 'a') --> $$ = nterm item (0x58410a60 'a') + $1 = token 'a' (0x57d33a60 'a') +-> $$ = nterm item (0x57d33a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x58410a90->Object::Object { 0x58410a00, 0x58410a30, 0x58410a60 } -Next token is token 'a' (0x58410a90 'a') -Shifting token 'a' (0x58410a90 'a') +0x57d33a90->Object::Object { 0x57d33a00, 0x57d33a30, 0x57d33a60 } +Next token is token 'a' (0x57d33a90 'a') +Shifting token 'a' (0x57d33a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a90 'a') --> $$ = nterm item (0x58410a90 'a') + $1 = token 'a' (0x57d33a90 'a') +-> $$ = nterm item (0x57d33a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x58410ac0->Object::Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90 } -Next token is token 'p' (0x58410ac0 'p'Exception caught: cleaning lookahead and stack -0x58410ac0->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90, 0x58410ac0 } -0x58410a90->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90 } -0x58410a60->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60 } -0x58410a30->Object::~Object { 0x58410a00, 0x58410a30 } -0x58410a00->Object::~Object { 0x58410a00 } +0x57d33ac0->Object::Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90 } +Next token is token 'p' (0x57d33ac0 'p'Exception caught: cleaning lookahead and stack +0x57d33ac0->Object::~Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90, 0x57d33ac0 } +0x57d33a90->Object::~Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90 } +0x57d33a60->Object::~Object { 
0x57d33a00, 0x57d33a30, 0x57d33a60 } +0x57d33a30->Object::~Object { 0x57d33a00, 0x57d33a30 } +0x57d33a00->Object::~Object { 0x57d33a00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252274,57 +255108,57 @@ Entering state 0 Stack now 0 Reading a token -0x58410a00->Object::Object { } -Next token is token 'a' (0x58410a00 'a') -Shifting token 'a' (0x58410a00 'a') +0x57d33a00->Object::Object { } +Next token is token 'a' (0x57d33a00 'a') +Shifting token 'a' (0x57d33a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a00 'a') --> $$ = nterm item (0x58410a00 'a') + $1 = token 'a' (0x57d33a00 'a') +-> $$ = nterm item (0x57d33a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x58410a30->Object::Object { 0x58410a00 } -Next token is token 'a' (0x58410a30 'a') -Shifting token 'a' (0x58410a30 'a') +0x57d33a30->Object::Object { 0x57d33a00 } +Next token is token 'a' (0x57d33a30 'a') +Shifting token 'a' (0x57d33a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a30 'a') --> $$ = nterm item (0x58410a30 'a') + $1 = token 'a' (0x57d33a30 'a') +-> $$ = nterm item (0x57d33a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x58410a60->Object::Object { 0x58410a00, 0x58410a30 } -Next token is token 'a' (0x58410a60 'a') -Shifting token 'a' (0x58410a60 'a') +0x57d33a60->Object::Object { 0x57d33a00, 0x57d33a30 } +Next token is token 'a' (0x57d33a60 'a') +Shifting token 'a' (0x57d33a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a60 'a') --> $$ = nterm item (0x58410a60 'a') + $1 = token 'a' (0x57d33a60 'a') +-> $$ = nterm item (0x57d33a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x58410a90->Object::Object { 0x58410a00, 0x58410a30, 0x58410a60 } -Next token is token 'a' (0x58410a90 'a') -Shifting token 'a' (0x58410a90 'a') +0x57d33a90->Object::Object { 0x57d33a00, 0x57d33a30, 0x57d33a60 } +Next token is token 'a' (0x57d33a90 'a') +Shifting token 'a' (0x57d33a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x58410a90 'a') --> $$ = nterm item (0x58410a90 'a') + $1 = token 'a' (0x57d33a90 'a') +-> $$ = nterm item (0x57d33a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x58410ac0->Object::Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90 } -Next token is token 'p' (0x58410ac0 'p'Exception caught: cleaning lookahead and stack -0x58410ac0->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90, 0x58410ac0 } -0x58410a90->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60, 0x58410a90 } -0x58410a60->Object::~Object { 0x58410a00, 0x58410a30, 0x58410a60 } -0x58410a30->Object::~Object { 0x58410a00, 0x58410a30 } -0x58410a00->Object::~Object { 0x58410a00 } +0x57d33ac0->Object::Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90 } +Next token is token 'p' (0x57d33ac0 'p'Exception caught: cleaning lookahead and stack +0x57d33ac0->Object::~Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90, 0x57d33ac0 } +0x57d33a90->Object::~Object { 0x57d33a00, 0x57d33a30, 0x57d33a60, 0x57d33a90 } +0x57d33a60->Object::~Object { 0x57d33a00, 0x57d33a30, 0x57d33a60 } +0x57d33a30->Object::~Object { 0x57d33a00, 0x57d33a30 } +0x57d33a00->Object::~Object { 0x57d33a00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr @@ -252332,224 +255166,32 @@ 
exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae stderr: -./c++.at:1065: $PREPARSER ./input < in exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaE stderr: -./c++.at:1065: $PREPARSER ./input < in exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaT stderr: -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:573: $here/modern ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stdout: -Modern C++: 201103 -./c++.at:573: $PREPARSER ./list ./c++.at:1361: $PREPARSER ./input aaaaR stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern -stderr: -stdout: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -Modern C++: 201103 -./c++.at:574: $PREPARSER ./list -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -681. c++.at:884: ok +674. c++.at:574: ok +682. c++.at:1064: ok stderr: stdout: -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $PREPARSER ./input -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -663. c++.at:26: ok -stderr: -688. 
c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... -stdout: -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -689. c++.at:1371: testing C++ GLR parser identifier shadowing ... -stderr: -./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stdout: -./c++.at:659: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201402 -./c++.at:568: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas @@ -252558,320 +255200,467 @@ ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaal stderr: -stdout: -stderr: -./c++.at:1066: $PREPARSER ./input < in exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i stderr: -stderr: exception caught: initial-action -stderr: -stdout: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: ./exceptions || exit 77 -stdout: -./c++.at:1066: $PREPARSER ./input < in -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stderr: ./c++.at:1360: $PREPARSER ./input aaaap stderr: -stdout: -Inner caught -Outer caught -stderr: -error: invalid expression -./c++.at:567: $here/modern -./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -Modern C++: 201402 -./c++.at:567: $PREPARSER ./list ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -stderr: -stderr: -./c++.at:1066: $PREPARSER ./input < in -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x580e5a00->Object::Object { } -Next token is token 'a' (0x580e5a00 'a') -Shifting token 'a' (0x580e5a00 'a') +0x5751ea00->Object::Object { } +Next token is token 'a' (0x5751ea00 'a') +Shifting token 'a' (0x5751ea00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a00 'a') --> $$ = nterm item (0x580e5a00 'a') + $1 = token 'a' (0x5751ea00 'a') +-> $$ = nterm item (0x5751ea00 'a') Entering state 11 Stack now 0 11 Reading a token -0x580e5a30->Object::Object { 0x580e5a00 } -Next token is token 'a' (0x580e5a30 'a') -Shifting token 'a' (0x580e5a30 'a') +0x5751ea30->Object::Object { 0x5751ea00 } +Next token is token 'a' (0x5751ea30 'a') +Shifting token 'a' (0x5751ea30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a30 'a') --> $$ = nterm item (0x580e5a30 'a') + $1 = token 'a' (0x5751ea30 'a') +-> $$ = nterm item (0x5751ea30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x580e5a60->Object::Object { 0x580e5a00, 0x580e5a30 } -Next token is token 'a' (0x580e5a60 'a') -Shifting token 'a' (0x580e5a60 'a') +0x5751ea60->Object::Object { 0x5751ea00, 0x5751ea30 } +Next token is token 'a' (0x5751ea60 'a') +Shifting token 'a' (0x5751ea60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a60 'a') --> $$ = nterm item (0x580e5a60 'a') + $1 = token 'a' (0x5751ea60 'a') +-> $$ = nterm item (0x5751ea60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x580e5a90->Object::Object { 0x580e5a00, 0x580e5a30, 0x580e5a60 } -Next token is token 'a' (0x580e5a90 'a') -Shifting token 'a' (0x580e5a90 'a') +0x5751ea90->Object::Object { 0x5751ea00, 0x5751ea30, 0x5751ea60 } +Next token is token 'a' (0x5751ea90 'a') +Shifting token 'a' (0x5751ea90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a90 'a') --> $$ = nterm item 
(0x580e5a90 'a') + $1 = token 'a' (0x5751ea90 'a') +-> $$ = nterm item (0x5751ea90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x580e5ac0->Object::Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90 } -Next token is token 'p' (0x580e5ac0 'p'Exception caught: cleaning lookahead and stack -0x580e5ac0->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90, 0x580e5ac0 } -0x580e5a90->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90 } -0x580e5a60->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60 } -0x580e5a30->Object::~Object { 0x580e5a00, 0x580e5a30 } -0x580e5a00->Object::~Object { 0x580e5a00 } +0x5751eac0->Object::Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90 } +Next token is token 'p' (0x5751eac0 'p'Exception caught: cleaning lookahead and stack +0x5751eac0->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90, 0x5751eac0 } +0x5751ea90->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90 } +0x5751ea60->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60 } +0x5751ea30->Object::~Object { 0x5751ea00, 0x5751ea30 } +0x5751ea00->Object::~Object { 0x5751ea00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:856: $PREPARSER ./input -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x580e5a00->Object::Object { } -Next token is token 'a' (0x580e5a00 'a') -Shifting token 'a' (0x580e5a00 'a') +0x5751ea00->Object::Object { } +Next token is token 'a' (0x5751ea00 'a') +Shifting token 'a' (0x5751ea00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a00 'a') --> $$ = nterm item (0x580e5a00 'a') + $1 = token 'a' (0x5751ea00 'a') +-> $$ = nterm item (0x5751ea00 'a') Entering state 11 Stack now 0 11 Reading a token -0x580e5a30->Object::Object { 0x580e5a00 } -Next token is token 'a' (0x580e5a30 'a') -Shifting token 'a' (0x580e5a30 'a') +0x5751ea30->Object::Object { 0x5751ea00 } +Next token is token 'a' (0x5751ea30 'a') +Shifting token 'a' (0x5751ea30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a30 'a') --> $$ = nterm item (0x580e5a30 'a') + $1 = token 'a' (0x5751ea30 'a') +-> $$ = nterm item (0x5751ea30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x580e5a60->Object::Object { 0x580e5a00, 0x580e5a30 } -Next token is token 'a' (0x580e5a60 'a') -Shifting token 'a' (0x580e5a60 'a') +0x5751ea60->Object::Object { 0x5751ea00, 0x5751ea30 } +Next token is token 'a' (0x5751ea60 'a') +Shifting token 'a' (0x5751ea60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580e5a60 'a') --> $$ = nterm item (0x580e5a60 'a') + $1 = token 'a' (0x5751ea60 'a') +-> $$ = nterm item (0x5751ea60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x580e5a90->Object::Object { 0x580e5a00, 0x580e5a30, 0x580e5a60 } -Next token is token 'a' (0x580e5a90 'a') -Shifting token 'a' (0x580e5a90 'a') +0x5751ea90->Object::Object { 0x5751ea00, 0x5751ea30, 0x5751ea60 } +Next token is token 'a' (0x5751ea90 'a') +Shifting token 'a' (0x5751ea90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 
147): - $1 = token 'a' (0x580e5a90 'a') --> $$ = nterm item (0x580e5a90 'a') + $1 = token 'a' (0x5751ea90 'a') +-> $$ = nterm item (0x5751ea90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x580e5ac0->Object::Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90 } -Next token is token 'p' (0x580e5ac0 'p'Exception caught: cleaning lookahead and stack -0x580e5ac0->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90, 0x580e5ac0 } -0x580e5a90->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60, 0x580e5a90 } -0x580e5a60->Object::~Object { 0x580e5a00, 0x580e5a30, 0x580e5a60 } -0x580e5a30->Object::~Object { 0x580e5a00, 0x580e5a30 } -0x580e5a00->Object::~Object { 0x580e5a00 } +0x5751eac0->Object::Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90 } +Next token is token 'p' (0x5751eac0 'p'Exception caught: cleaning lookahead and stack +0x5751eac0->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90, 0x5751eac0 } +0x5751ea90->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60, 0x5751ea90 } +0x5751ea60->Object::~Object { 0x5751ea00, 0x5751ea30, 0x5751ea60 } +0x5751ea30->Object::~Object { 0x5751ea00, 0x5751ea30 } +0x5751ea00->Object::~Object { 0x5751ea00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stdout: -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaaE stderr: -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -======== Testing with C++ standard flags: '' exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./c++.at:570: $here/modern -stdout: ./c++.at:1360: $PREPARSER ./input aaaaT -Modern C++: 201402 -./c++.at:570: $PREPARSER ./list -stderr: stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaR stderr: -stdout: -stderr: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: 
-./c++.at:1066: ./check +./c++.at:1363: $PREPARSER ./input aaaas stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +691. c++.at:1517: testing Default action ... +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaal +689. c++.at:1371: testing C++ GLR parser identifier shadowing ... stderr: -stdout: -./c++.at:850: $PREPARSER ./input +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1555: ./check +690. c++.at:1422: testing Shared locations ... +./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffaacdef->Object::Object { } +0xffaace98->Object::Object { 0xffaacdef } +0xffaacdef->Object::~Object { 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0xffaace98 } +0xffaacd8b->Object::Object { 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3c4->Object::Object { 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0xffaace00 } +Entering state 1 +Stack now 0 1 +0xffaacea8->Object::Object { 0x5682f3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3c4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3c4->Object::~Object { 0x5682f3c4, 0xffaacea8 } +0x5682f3c4->Object::Object { 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4 } +0xffaace98->Object::Object { 0x5682f3c4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0xffaace98 } +0xffaacd8b->Object::Object { 
0x5682f3c4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3d4->Object::Object { 0x5682f3c4, 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +Entering state 1 +Stack now 0 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3d4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3d4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0x5682f3d4->Object::Object { 0x5682f3c4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace98 } +0xffaacd8b->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3e4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +Entering state 1 +Stack now 0 10 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3e4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3e4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0x5682f3e4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace98 } +0xffaacd8b->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3f4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace00 } +0xffaacd8f->Object::~Object 
{ 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace00 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3f4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3f4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +0x5682f3f4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef, 0xffaace98 } +Next token is token 'p' (0xffaace98 'p'Exception caught: cleaning lookahead and stack +0x5682f3f4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace98 } +0x5682f3e4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace98 } +0x5682f3d4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace98 } +0x5682f3c4->Object::~Object { 0x5682f3c4, 0xffaace98 } +0xffaace98->Object::~Object { 0xffaace98 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffaacdef->Object::Object { } +0xffaace98->Object::Object { 0xffaacdef } +0xffaacdef->Object::~Object { 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0xffaace98 } +0xffaacd8b->Object::Object { 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3c4->Object::Object { 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0xffaace00 } +Entering state 1 +Stack now 0 1 +0xffaacea8->Object::Object { 0x5682f3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3c4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3c4->Object::~Object { 0x5682f3c4, 0xffaacea8 } +0x5682f3c4->Object::Object { 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4 } +0xffaace98->Object::Object { 0x5682f3c4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0xffaace98 } +0xffaacd8b->Object::Object { 0x5682f3c4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3d4->Object::Object { 0x5682f3c4, 0xffaace00 } 
+0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +Entering state 1 +Stack now 0 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3d4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3d4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0x5682f3d4->Object::Object { 0x5682f3c4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace98 } +0xffaacd8b->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3e4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +Entering state 1 +Stack now 0 10 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3e4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3e4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0x5682f3e4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacdef, 0xffaace98 } +Next token is token 'a' (0xffaace98 'a') +0xffaace00->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace98 } +0xffaacd8b->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00, 0xffaace98 } +0xffaacd8b->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacd8b, 0xffaace00, 0xffaace98 } +0xffaace98->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00, 0xffaace98 } +Shifting token 'a' (0xffaace00 'a') +0x5682f3f4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace00 } +0xffaacd8f->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace00 } +0xffaacd8f->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacd8f, 0xffaace00 } +0xffaace00->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace00 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffaacea8->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 
0x5682f3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5682f3f4 'a') +-> $$ = nterm item (0xffaacea8 'a') +0x5682f3f4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +0x5682f3f4->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaacea8 } +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef, 0xffaacea8 } +0xffaacea8->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacea8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffaacdef->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4 } +0xffaace98->Object::Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef } +0xffaacdef->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaacdef, 0xffaace98 } +Next token is token 'p' (0xffaace98 'p'Exception caught: cleaning lookahead and stack +0x5682f3f4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0x5682f3f4, 0xffaace98 } +0x5682f3e4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0x5682f3e4, 0xffaace98 } +0x5682f3d4->Object::~Object { 0x5682f3c4, 0x5682f3d4, 0xffaace98 } +0x5682f3c4->Object::~Object { 0x5682f3c4, 0xffaace98 } +0xffaace98->Object::~Object { 0xffaace98 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr stderr: stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201402 -./c++.at:571: $PREPARSER ./list +./c++.at:851: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas +======== Testing with C++ standard flags: '' stderr: exception caught: reduction +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error ./c++.at:1362: $PREPARSER ./input aaaal +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: yylex +./c++.at:1363: $PREPARSER ./input aaaaE ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input i stderr: exception caught: initial-action -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaaT ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 
./c++.at:1362: $PREPARSER ./input aaaap stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252881,215 +255670,218 @@ Entering state 0 Stack now 0 Reading a token -0xffca318f->Object::Object { } -0xffca3238->Object::Object { 0xffca318f } -0xffca318f->Object::~Object { 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0xffca3238 } -0xffca3238->Object::~Object { 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323c4->Object::Object { 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0xffca3188 } +0xffffbbdf->Object::Object { } +0xffffbc88->Object::Object { 0xffffbbdf } +0xffffbbdf->Object::~Object { 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0xffffbc88 } +0xffffbc88->Object::~Object { 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843c4->Object::Object { 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0xffffbbd8 } Entering state 2 Stack now 0 2 -0xffca3248->Object::Object { 0x582323c4 } +0xffffbc98->Object::Object { 0x576843c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323c4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323c4->Object::~Object { 0x582323c4, 0xffca3248 } -0x582323c4->Object::Object { 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0xffca3248 } + $1 = token 'a' (0x576843c4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843c4->Object::~Object { 0x576843c4, 0xffffbc98 } +0x576843c4->Object::Object { 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0xffffbc98 } Entering state 11 Stack now 0 11 Reading a token -0xffca318f->Object::Object { 0x582323c4 } -0xffca3238->Object::Object { 0x582323c4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323d4->Object::Object { 0x582323c4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4 } +0xffffbc88->Object::Object { 0x576843c4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843d4->Object::Object { 0x576843c4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbd8 } Entering state 2 Stack now 0 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323d4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323d4->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3248 } -0x582323d4->Object::Object { 0x582323c4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3248 } + $1 = token 'a' (0x576843d4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843d4->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc98 } +0x576843d4->Object::Object { 0x576843c4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0xffca318f } 
-0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0x582323d4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323e4->Object::Object { 0x582323c4, 0x582323d4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0x576843d4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843e4->Object::Object { 0x576843c4, 0x576843d4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8 } Entering state 2 Stack now 0 11 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323e4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323e4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } -0x582323e4->Object::Object { 0x582323c4, 0x582323d4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } + $1 = token 'a' (0x576843e4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843e4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } +0x576843e4->Object::Object { 0x576843c4, 0x576843d4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323f4->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843f4->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbbd8 } Entering state 2 Stack now 0 11 11 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323f4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323f4->Object::~Object { 0x582323c4, 
0x582323d4, 0x582323e4, 0x582323f4, 0xffca3248 } -0x582323f4->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3248 } + $1 = token 'a' (0x576843f4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843f4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc98 } +0x576843f4->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca318f, 0xffca3238 } -Next token is token 'p' (0xffca3238 'p'Exception caught: cleaning lookahead and stack -0x582323f4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3238 } -0x582323e4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3238 } -0x582323d4->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3238 } -0x582323c4->Object::~Object { 0x582323c4, 0xffca3238 } -0xffca3238->Object::~Object { 0xffca3238 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'p' (0xffffbc88 'p'Exception caught: cleaning lookahead and stack +0x576843f4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc88 } +0x576843e4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc88 } +0x576843d4->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc88 } +0x576843c4->Object::~Object { 0x576843c4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0xffffbc88 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffca318f->Object::Object { } -0xffca3238->Object::Object { 0xffca318f } -0xffca318f->Object::~Object { 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0xffca3238 } -0xffca3238->Object::~Object { 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323c4->Object::Object { 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0xffca3188 } +0xffffbbdf->Object::Object { } +0xffffbc88->Object::Object { 0xffffbbdf } +0xffffbbdf->Object::~Object { 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0xffffbc88 } +0xffffbc88->Object::~Object { 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843c4->Object::Object { 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0xffffbbd8 } Entering state 2 Stack now 0 2 -0xffca3248->Object::Object { 0x582323c4 } +0xffffbc98->Object::Object { 0x576843c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323c4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323c4->Object::~Object { 0x582323c4, 0xffca3248 } -0x582323c4->Object::Object { 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0xffca3248 } + $1 = token 'a' (0x576843c4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843c4->Object::~Object { 0x576843c4, 0xffffbc98 } 
+0x576843c4->Object::Object { 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0xffffbc98 } Entering state 11 Stack now 0 11 Reading a token -0xffca318f->Object::Object { 0x582323c4 } -0xffca3238->Object::Object { 0x582323c4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323d4->Object::Object { 0x582323c4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4 } +0xffffbc88->Object::Object { 0x576843c4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843d4->Object::Object { 0x576843c4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbd8 } Entering state 2 Stack now 0 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323d4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323d4->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3248 } -0x582323d4->Object::Object { 0x582323c4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3248 } + $1 = token 'a' (0x576843d4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843d4->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc98 } +0x576843d4->Object::Object { 0x576843c4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0x582323d4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323e4->Object::Object { 0x582323c4, 0x582323d4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0x576843d4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843e4->Object::Object { 0x576843c4, 0x576843d4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8 } Entering state 2 Stack now 0 11 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323e4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323e4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } -0x582323e4->Object::Object { 0x582323c4, 0x582323d4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } + $1 = 
token 'a' (0x576843e4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843e4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } +0x576843e4->Object::Object { 0x576843c4, 0x576843d4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca318f, 0xffca3238 } -Next token is token 'a' (0xffca3238 'a') -0xffca3188->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3238 } -0xffca3238->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188, 0xffca3238 } -Shifting token 'a' (0xffca3188 'a') -0x582323f4->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3188 } -0xffca3188->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3188 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'a' (0xffffbc88 'a') +0xffffbbd8->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8, 0xffffbc88 } +Shifting token 'a' (0xffffbbd8 'a') +0x576843f4->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbbd8 } +0xffffbbd8->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbbd8 } Entering state 2 Stack now 0 11 11 11 2 -0xffca3248->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4 } +0xffffbc98->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x582323f4 'a') --> $$ = nterm item (0xffca3248 'a') -0x582323f4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3248 } -0x582323f4->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3248 } -0xffca3248->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3248 } + $1 = token 'a' (0x576843f4 'a') +-> $$ = nterm item (0xffffbc98 'a') +0x576843f4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc98 } +0x576843f4->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc98 } +0xffffbc98->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc98 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffca318f->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4 } -0xffca3238->Object::Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca318f } -0xffca318f->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca318f, 0xffca3238 } -Next token is token 'p' (0xffca3238 'p'Exception caught: cleaning lookahead and stack -0x582323f4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0x582323f4, 0xffca3238 } -0x582323e4->Object::~Object { 0x582323c4, 0x582323d4, 0x582323e4, 0xffca3238 } -0x582323d4->Object::~Object { 0x582323c4, 0x582323d4, 0xffca3238 } -0x582323c4->Object::~Object { 0x582323c4, 0xffca3238 } -0xffca3238->Object::~Object { 0xffca3238 } +0xffffbbdf->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4 } +0xffffbc88->Object::Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbbdf } +0xffffbbdf->Object::~Object { 0x576843c4, 0x576843d4, 
0x576843e4, 0x576843f4, 0xffffbbdf, 0xffffbc88 } +Next token is token 'p' (0xffffbc88 'p'Exception caught: cleaning lookahead and stack +0x576843f4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0x576843f4, 0xffffbc88 } +0x576843e4->Object::~Object { 0x576843c4, 0x576843d4, 0x576843e4, 0xffffbc88 } +0x576843d4->Object::~Object { 0x576843c4, 0x576843d4, 0xffffbc88 } +0x576843c4->Object::~Object { 0x576843c4, 0xffffbc88 } +0xffffbc88->Object::~Object { 0xffffbc88 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -stdout: -======== Testing with C++ standard flags: '' exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaaR stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaE +======== Testing with C++ standard flags: '' stderr: +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaT @@ -253097,129 +255889,15 @@ ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaR stderr: -stdout: -./c++.at:569: $here/modern -stdout: -Modern C++: 201402 -./c++.at:569: $PREPARSER ./list -stderr: -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -======== Testing with C++ standard flags: '' ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201402 -./c++.at:572: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error 
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -stdout: -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: $here/modern -stdout: -Modern C++: 201703 -./c++.at:566: $PREPARSER ./list -stderr: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1064: $PREPARSER ./input < in -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -error: invalid character ./c++.at:1361: $PREPARSER ./input aaaas -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -253228,74 +255906,66 @@ stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaap -./c++.at:856: $PREPARSER ./input stderr: -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' ./c++.at:1361: $PREPARSER ./input --debug aaaap -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57bc1a00->Object::Object { } -Next token is token 'a' (0x57bc1a00 'a') -Shifting token 'a' (0x57bc1a00 'a') +0x56c66a00->Object::Object { } +Next token is token 'a' (0x56c66a00 'a') +Shifting token 'a' (0x56c66a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a00 'a') --> $$ = nterm item (0x57bc1a00 'a') + $1 = token 'a' (0x56c66a00 'a') +-> $$ = nterm item (0x56c66a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57bc1a30->Object::Object { 0x57bc1a00 } -Next token is token 'a' (0x57bc1a30 'a') -Shifting token 'a' (0x57bc1a30 'a') +0x56c66a30->Object::Object { 0x56c66a00 } +Next token is token 'a' (0x56c66a30 'a') +Shifting token 'a' (0x56c66a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a30 'a') --> $$ = nterm item (0x57bc1a30 'a') + $1 = token 'a' (0x56c66a30 'a') +-> $$ = nterm item (0x56c66a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57bc1a60->Object::Object { 0x57bc1a00, 0x57bc1a30 } -Next token is token 'a' (0x57bc1a60 'a') -Shifting token 'a' (0x57bc1a60 'a') +0x56c66a60->Object::Object { 0x56c66a00, 0x56c66a30 } +Next token is token 'a' (0x56c66a60 'a') +Shifting token 'a' (0x56c66a60 'a') 
Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a60 'a') --> $$ = nterm item (0x57bc1a60 'a') + $1 = token 'a' (0x56c66a60 'a') +-> $$ = nterm item (0x56c66a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57bc1a90->Object::Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60 } -Next token is token 'a' (0x57bc1a90 'a') -Shifting token 'a' (0x57bc1a90 'a') +0x56c66a90->Object::Object { 0x56c66a00, 0x56c66a30, 0x56c66a60 } +Next token is token 'a' (0x56c66a90 'a') +Shifting token 'a' (0x56c66a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a90 'a') --> $$ = nterm item (0x57bc1a90 'a') + $1 = token 'a' (0x56c66a90 'a') +-> $$ = nterm item (0x56c66a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57bc1ac0->Object::Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90 } -Next token is token 'p' (0x57bc1ac0 'p'Exception caught: cleaning lookahead and stack -0x57bc1ac0->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90, 0x57bc1ac0 } -0x57bc1a90->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90 } -0x57bc1a60->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60 } -0x57bc1a30->Object::~Object { 0x57bc1a00, 0x57bc1a30 } -0x57bc1a00->Object::~Object { 0x57bc1a00 } +0x56c66ac0->Object::Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90 } +Next token is token 'p' (0x56c66ac0 'p'Exception caught: cleaning lookahead and stack +0x56c66ac0->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90, 0x56c66ac0 } +0x56c66a90->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90 } +0x56c66a60->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60 } +0x56c66a30->Object::~Object { 0x56c66a00, 0x56c66a30 } +0x56c66a00->Object::~Object { 0x56c66a00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -253304,57 +255974,57 @@ Entering state 0 Stack now 0 Reading a token -0x57bc1a00->Object::Object { } -Next token is token 'a' (0x57bc1a00 'a') -Shifting token 'a' (0x57bc1a00 'a') +0x56c66a00->Object::Object { } +Next token is token 'a' (0x56c66a00 'a') +Shifting token 'a' (0x56c66a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a00 'a') --> $$ = nterm item (0x57bc1a00 'a') + $1 = token 'a' (0x56c66a00 'a') +-> $$ = nterm item (0x56c66a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57bc1a30->Object::Object { 0x57bc1a00 } -Next token is token 'a' (0x57bc1a30 'a') -Shifting token 'a' (0x57bc1a30 'a') +0x56c66a30->Object::Object { 0x56c66a00 } +Next token is token 'a' (0x56c66a30 'a') +Shifting token 'a' (0x56c66a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a30 'a') --> $$ = nterm item (0x57bc1a30 'a') + $1 = token 'a' (0x56c66a30 'a') +-> $$ = nterm item (0x56c66a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57bc1a60->Object::Object { 0x57bc1a00, 0x57bc1a30 } -Next token is token 'a' (0x57bc1a60 'a') -Shifting token 'a' (0x57bc1a60 'a') +0x56c66a60->Object::Object { 0x56c66a00, 0x56c66a30 } +Next token is token 'a' (0x56c66a60 'a') +Shifting token 'a' (0x56c66a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a60 'a') --> $$ = nterm item (0x57bc1a60 'a') + $1 = token 'a' (0x56c66a60 'a') +-> $$ = nterm item (0x56c66a60 'a') Entering state 10 Stack now 0 10 10 10 
Reading a token -0x57bc1a90->Object::Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60 } -Next token is token 'a' (0x57bc1a90 'a') -Shifting token 'a' (0x57bc1a90 'a') +0x56c66a90->Object::Object { 0x56c66a00, 0x56c66a30, 0x56c66a60 } +Next token is token 'a' (0x56c66a90 'a') +Shifting token 'a' (0x56c66a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57bc1a90 'a') --> $$ = nterm item (0x57bc1a90 'a') + $1 = token 'a' (0x56c66a90 'a') +-> $$ = nterm item (0x56c66a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57bc1ac0->Object::Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90 } -Next token is token 'p' (0x57bc1ac0 'p'Exception caught: cleaning lookahead and stack -0x57bc1ac0->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90, 0x57bc1ac0 } -0x57bc1a90->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60, 0x57bc1a90 } -0x57bc1a60->Object::~Object { 0x57bc1a00, 0x57bc1a30, 0x57bc1a60 } -0x57bc1a30->Object::~Object { 0x57bc1a00, 0x57bc1a30 } -0x57bc1a00->Object::~Object { 0x57bc1a00 } +0x56c66ac0->Object::Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90 } +Next token is token 'p' (0x56c66ac0 'p'Exception caught: cleaning lookahead and stack +0x56c66ac0->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90, 0x56c66ac0 } +0x56c66a90->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60, 0x56c66a90 } +0x56c66a60->Object::~Object { 0x56c66a00, 0x56c66a30, 0x56c66a60 } +0x56c66a30->Object::~Object { 0x56c66a00, 0x56c66a30 } +0x56c66a00->Object::~Object { 0x56c66a00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr @@ -253368,95 +256038,74 @@ stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in ./c++.at:1361: $PREPARSER ./input aaaaT stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1361: $PREPARSER ./input aaaaR -error: invalid expression -caught error -error: invalid character -caught error stderr: -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1065: $PREPARSER ./input < in stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1065: $PREPARSER ./input < in stdout: -./c++.at:235: $PREPARSER ./list -stderr: +./c++.at:857: $PREPARSER ./input stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1555: $PREPARSER ./test stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: -./c++.at:1065: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1555: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:850: $PREPARSER ./input stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh +./c++.at:1555: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy stderr: stdout: ======== Testing with C++ standard flags: '' ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1555: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: stdout: -Modern C++: 201402 -./c++.at:573: $PREPARSER ./list +./c++.at:851: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -253474,468 +256123,406 @@ Entering state 0 Stack now 0 Reading a token -0x577d7a00->Object::Object { } -Next token is token 'a' (0x577d7a00 'a') -Shifting token 'a' (0x577d7a00 'a') +0x567f6a00->Object::Object { } +Next token is token 'a' (0x567f6a00 'a') +Shifting token 'a' (0x567f6a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a00 'a') --> $$ = nterm item (0x577d7a00 'a') + $1 = token 'a' (0x567f6a00 'a') +-> $$ = nterm item (0x567f6a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x577d7a30->Object::Object { 0x577d7a00 } -Next token is token 'a' (0x577d7a30 'a') -Shifting token 'a' 
(0x577d7a30 'a') +0x567f6a30->Object::Object { 0x567f6a00 } +Next token is token 'a' (0x567f6a30 'a') +Shifting token 'a' (0x567f6a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a30 'a') --> $$ = nterm item (0x577d7a30 'a') + $1 = token 'a' (0x567f6a30 'a') +-> $$ = nterm item (0x567f6a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x577d7a60->Object::Object { 0x577d7a00, 0x577d7a30 } -Next token is token 'a' (0x577d7a60 'a') -Shifting token 'a' (0x577d7a60 'a') +0x567f6a60->Object::Object { 0x567f6a00, 0x567f6a30 } +Next token is token 'a' (0x567f6a60 'a') +Shifting token 'a' (0x567f6a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a60 'a') --> $$ = nterm item (0x577d7a60 'a') + $1 = token 'a' (0x567f6a60 'a') +-> $$ = nterm item (0x567f6a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x577d7a90->Object::Object { 0x577d7a00, 0x577d7a30, 0x577d7a60 } -Next token is token 'a' (0x577d7a90 'a') -Shifting token 'a' (0x577d7a90 'a') +0x567f6a90->Object::Object { 0x567f6a00, 0x567f6a30, 0x567f6a60 } +Next token is token 'a' (0x567f6a90 'a') +Shifting token 'a' (0x567f6a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a90 'a') --> $$ = nterm item (0x577d7a90 'a') + $1 = token 'a' (0x567f6a90 'a') +-> $$ = nterm item (0x567f6a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x577d7ac0->Object::Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90 } -Next token is token 'p' (0x577d7ac0 'p'Exception caught: cleaning lookahead and stack -0x577d7ac0->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90, 0x577d7ac0 } -0x577d7a90->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90 } -0x577d7a60->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60 } -0x577d7a30->Object::~Object { 0x577d7a00, 0x577d7a30 } -0x577d7a00->Object::~Object { 0x577d7a00 } +0x567f6ac0->Object::Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90 } +Next token is token 'p' (0x567f6ac0 'p'Exception caught: cleaning lookahead and stack +0x567f6ac0->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90, 0x567f6ac0 } +0x567f6a90->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90 } +0x567f6a60->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60 } +0x567f6a30->Object::~Object { 0x567f6a00, 0x567f6a30 } +0x567f6a00->Object::~Object { 0x567f6a00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x577d7a00->Object::Object { } -Next token is token 'a' (0x577d7a00 'a') -Shifting token 'a' (0x577d7a00 'a') +0x567f6a00->Object::Object { } +Next token is token 'a' (0x567f6a00 'a') +Shifting token 'a' (0x567f6a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a00 'a') --> $$ = nterm item (0x577d7a00 'a') + $1 = token 'a' (0x567f6a00 'a') +-> $$ = nterm item (0x567f6a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x577d7a30->Object::Object { 0x577d7a00 } -Next token is token 'a' (0x577d7a30 'a') -Shifting token 'a' (0x577d7a30 'a') +0x567f6a30->Object::Object { 0x567f6a00 } +Next token is token 'a' (0x567f6a30 'a') +Shifting token 'a' (0x567f6a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a30 'a') 
--> $$ = nterm item (0x577d7a30 'a') + $1 = token 'a' (0x567f6a30 'a') +-> $$ = nterm item (0x567f6a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x577d7a60->Object::Object { 0x577d7a00, 0x577d7a30 } -Next token is token 'a' (0x577d7a60 'a') -Shifting token 'a' (0x577d7a60 'a') +0x567f6a60->Object::Object { 0x567f6a00, 0x567f6a30 } +Next token is token 'a' (0x567f6a60 'a') +Shifting token 'a' (0x567f6a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a60 'a') --> $$ = nterm item (0x577d7a60 'a') + $1 = token 'a' (0x567f6a60 'a') +-> $$ = nterm item (0x567f6a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x577d7a90->Object::Object { 0x577d7a00, 0x577d7a30, 0x577d7a60 } -Next token is token 'a' (0x577d7a90 'a') -Shifting token 'a' (0x577d7a90 'a') +0x567f6a90->Object::Object { 0x567f6a00, 0x567f6a30, 0x567f6a60 } +Next token is token 'a' (0x567f6a90 'a') +Shifting token 'a' (0x567f6a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x577d7a90 'a') --> $$ = nterm item (0x577d7a90 'a') + $1 = token 'a' (0x567f6a90 'a') +-> $$ = nterm item (0x567f6a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x577d7ac0->Object::Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90 } -Next token is token 'p' (0x577d7ac0 'p'Exception caught: cleaning lookahead and stack -0x577d7ac0->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90, 0x577d7ac0 } -0x577d7a90->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60, 0x577d7a90 } -0x577d7a60->Object::~Object { 0x577d7a00, 0x577d7a30, 0x577d7a60 } -0x577d7a30->Object::~Object { 0x577d7a00, 0x577d7a30 } -0x577d7a00->Object::~Object { 0x577d7a00 } +0x567f6ac0->Object::Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90 } +Next token is token 'p' (0x567f6ac0 'p'Exception caught: cleaning lookahead and stack +0x567f6ac0->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90, 0x567f6ac0 } +0x567f6a90->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60, 0x567f6a90 } +0x567f6a60->Object::~Object { 0x567f6a00, 0x567f6a30, 0x567f6a60 } +0x567f6a30->Object::~Object { 0x567f6a00, 0x567f6a30 } +0x567f6a00->Object::~Object { 0x567f6a00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -./c++.at:856: $PREPARSER ./input -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stdout: -stderr: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaaT -./c++.at:659: $PREPARSER ./input -stderr: -./c++.at:1360: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -675. c++.at:584: ok -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 201402 -./c++.at:574: $PREPARSER ./list - -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -690. c++.at:1422: testing Shared locations ... 
-./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy -./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaas stderr: stdout: ======== Testing with C++ standard flags: '' ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae stderr: ./c++.at:1363: $PREPARSER ./input aaaal -stdout: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: $PREPARSER ./input i +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: initial-action +./c++.at:1360: $PREPARSER ./input aaaaT ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1360: $PREPARSER ./input aaaaR stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1363: $PREPARSER ./input --debug aaaap +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff87dddf->Object::Object { } -0xff87de88->Object::Object { 0xff87dddf } -0xff87dddf->Object::~Object { 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0xff87de88 } -0xff87de88->Object::~Object { 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263c4->Object::Object { 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0xff87ddd8 } +0xffb59ebf->Object::Object { } +0xffb59f68->Object::Object { 0xffb59ebf } +0xffb59ebf->Object::~Object { 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0xffb59f68 } +0xffb59e5b->Object::Object { 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753c4->Object::Object { 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0xffb59ed0 } Entering state 1 Stack now 0 1 -0xff87de98->Object::Object { 0x578263c4 } +0xffb59f78->Object::Object { 0x585753c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263c4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263c4->Object::~Object { 0x578263c4, 0xff87de98 } -0x578263c4->Object::Object { 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0xff87de98 } + $1 = token 'a' (0x585753c4 'a') 
+-> $$ = nterm item (0xffb59f78 'a') +0x585753c4->Object::~Object { 0x585753c4, 0xffb59f78 } +0x585753c4->Object::Object { 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0xffb59f78 } Entering state 10 Stack now 0 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4 } -0xff87de88->Object::Object { 0x578263c4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263d4->Object::Object { 0x578263c4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4 } +0xffb59f68->Object::Object { 0x585753c4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753d4->Object::Object { 0x585753c4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } Entering state 1 Stack now 0 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263d4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263d4->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de98 } -0x578263d4->Object::Object { 0x578263c4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de98 } + $1 = token 'a' (0x585753d4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753d4->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0x585753d4->Object::Object { 0x585753c4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0x578263d4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0x578263d4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263e4->Object::Object { 0x578263c4, 0x578263d4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 
0x585753d4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753e4->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } Entering state 1 Stack now 0 10 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263e4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263e4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } -0x578263e4->Object::Object { 0x578263c4, 0x578263d4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } + $1 = token 'a' (0x585753e4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753e4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0x585753e4->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263f4->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753f4->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ed0 } Entering state 1 Stack now 0 10 10 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263f4 'a') --> $$ = nterm item (0xff87de98 'a') 
-0x578263f4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de98 } -0x578263f4->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de98 } + $1 = token 'a' (0x585753f4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753f4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } +0x585753f4->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87dddf, 0xff87de88 } -Next token is token 'p' (0xff87de88 'p'Exception caught: cleaning lookahead and stack -0x578263f4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de88 } -0x578263e4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de88 } -0x578263d4->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de88 } -0x578263c4->Object::~Object { 0x578263c4, 0xff87de88 } -0xff87de88->Object::~Object { 0xff87de88 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'p' (0xffb59f68 'p'Exception caught: cleaning lookahead and stack +0x585753f4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f68 } +0x585753e4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f68 } +0x585753d4->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f68 } +0x585753c4->Object::~Object { 0x585753c4, 0xffb59f68 } +0xffb59f68->Object::~Object { 0xffb59f68 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0xff87dddf->Object::Object { } -0xff87de88->Object::Object { 0xff87dddf } -0xff87dddf->Object::~Object { 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0xff87de88 } -0xff87de88->Object::~Object { 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263c4->Object::Object { 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0xff87ddd8 } +0xffb59ebf->Object::Object { } +0xffb59f68->Object::Object { 0xffb59ebf } +0xffb59ebf->Object::~Object { 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0xffb59f68 } +0xffb59e5b->Object::Object { 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753c4->Object::Object { 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 
0x585753c4, 0xffb59ed0 } Entering state 1 Stack now 0 1 -0xff87de98->Object::Object { 0x578263c4 } +0xffb59f78->Object::Object { 0x585753c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263c4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263c4->Object::~Object { 0x578263c4, 0xff87de98 } -0x578263c4->Object::Object { 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0xff87de98 } + $1 = token 'a' (0x585753c4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753c4->Object::~Object { 0x585753c4, 0xffb59f78 } +0x585753c4->Object::Object { 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0xffb59f78 } Entering state 10 Stack now 0 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4 } -0xff87de88->Object::Object { 0x578263c4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263d4->Object::Object { 0x578263c4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4 } +0xffb59f68->Object::Object { 0x585753c4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753d4->Object::Object { 0x585753c4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } Entering state 1 Stack now 0 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263d4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263d4->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de98 } -0x578263d4->Object::Object { 0x578263c4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de98 } + $1 = token 'a' (0x585753d4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753d4->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0x585753d4->Object::Object { 0x585753c4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0x578263d4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0x578263d4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263e4->Object::Object { 0x578263c4, 0x578263d4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 
0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753e4->Object::Object { 0x585753c4, 0x585753d4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } Entering state 1 Stack now 0 10 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263e4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263e4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } -0x578263e4->Object::Object { 0x578263c4, 0x578263d4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } + $1 = token 'a' (0x585753e4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753e4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0x585753e4->Object::Object { 0x585753c4, 0x585753d4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87dddf, 0xff87de88 } -Next token is token 'a' (0xff87de88 'a') -0xff87ddd8->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de88 } -0xff87de88->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87ddd8, 0xff87de88 } -Shifting token 'a' (0xff87ddd8 'a') -0x578263f4->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87ddd8 } -0xff87ddd8->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87ddd8 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'a' (0xffb59f68 'a') +0xffb59ed0->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f68 } +0xffb59e5b->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0, 0xffb59f68 } +0xffb59e5b->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59e5b, 0xffb59ed0, 0xffb59f68 } +0xffb59f68->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0, 0xffb59f68 } +Shifting token 'a' (0xffb59ed0 'a') +0x585753f4->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59ed0 } +0xffb59e5f->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ed0 } +0xffb59e5f->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 
0xffb59e5f, 0xffb59ed0 } +0xffb59ed0->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ed0 } Entering state 1 Stack now 0 10 10 10 1 -0xff87de98->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4 } +0xffb59f78->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578263f4 'a') --> $$ = nterm item (0xff87de98 'a') -0x578263f4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de98 } -0x578263f4->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de98 } -0xff87de98->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de98 } + $1 = token 'a' (0x585753f4 'a') +-> $$ = nterm item (0xffb59f78 'a') +0x585753f4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } +0x585753f4->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f78 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf, 0xffb59f78 } +0xffb59f78->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f78 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff87dddf->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4 } -0xff87de88->Object::Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87dddf } -0xff87dddf->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87dddf, 0xff87de88 } -Next token is token 'p' (0xff87de88 'p'Exception caught: cleaning lookahead and stack -0x578263f4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0x578263f4, 0xff87de88 } -0x578263e4->Object::~Object { 0x578263c4, 0x578263d4, 0x578263e4, 0xff87de88 } -0x578263d4->Object::~Object { 0x578263c4, 0x578263d4, 0xff87de88 } -0x578263c4->Object::~Object { 0x578263c4, 0xff87de88 } -0xff87de88->Object::~Object { 0xff87de88 } +0xffb59ebf->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4 } +0xffb59f68->Object::Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf } +0xffb59ebf->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59ebf, 0xffb59f68 } +Next token is token 'p' (0xffb59f68 'p'Exception caught: cleaning lookahead and stack +0x585753f4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0x585753f4, 0xffb59f68 } +0x585753e4->Object::~Object { 0x585753c4, 0x585753d4, 0x585753e4, 0xffb59f68 } +0x585753d4->Object::~Object { 0x585753c4, 0x585753d4, 0xffb59f68 } +0x585753c4->Object::~Object { 0x585753c4, 0xffb59f68 } +0xffb59f68->Object::~Object { 0xffb59f68 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr @@ -253943,43 +256530,12 @@ exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae stderr: -stderr: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201703 -./c++.at:568: $PREPARSER ./list -stderr: ./c++.at:1363: $PREPARSER ./input aaaaE -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:1363: $PREPARSER ./input aaaaT stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -253988,44 +256544,14 @@ ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern -stdout: -Modern C++: 201703 -./c++.at:570: $PREPARSER ./list +./c++.at:857: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -254033,143 +256559,136 @@ stderr: exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: stderr: +./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stdout: +./c++.at:1502: $PREPARSER ./parser +stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: $PREPARSER ./input stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +690. 
c++.at:1422: ok ./c++.at:1361: $PREPARSER ./input --debug aaaap -======== Testing with C++ standard flags: '' -stderr: -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: stderr: -./c++.at:567: $here/modern Starting parse Entering state 0 Stack now 0 Reading a token -0x57c51a00->Object::Object { } -Next token is token 'a' (0x57c51a00 'a') -Shifting token 'a' (0x57c51a00 'a') +0x570dca00->Object::Object { } +Next token is token 'a' (0x570dca00 'a') +Shifting token 'a' (0x570dca00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a00 'a') --> $$ = nterm item (0x57c51a00 'a') + $1 = token 'a' (0x570dca00 'a') +-> $$ = nterm item (0x570dca00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57c51a30->Object::Object { 0x57c51a00 } -Next token is token 'a' (0x57c51a30 'a') -Shifting token 'a' (0x57c51a30 'a') +0x570dca30->Object::Object { 0x570dca00 } +Next token is token 'a' (0x570dca30 'a') +Shifting token 'a' (0x570dca30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a30 'a') --> $$ = nterm item (0x57c51a30 'a') + $1 = token 'a' (0x570dca30 'a') +-> $$ = nterm item (0x570dca30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57c51a60->Object::Object { 0x57c51a00, 0x57c51a30 } -Next token is token 'a' (0x57c51a60 'a') -Shifting token 'a' (0x57c51a60 'a') +0x570dca60->Object::Object { 0x570dca00, 0x570dca30 } +Next token is token 'a' (0x570dca60 'a') +Shifting token 'a' (0x570dca60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a60 'a') --> $$ = nterm item (0x57c51a60 'a') + $1 = token 'a' (0x570dca60 'a') +-> $$ = nterm item (0x570dca60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57c51a90->Object::Object { 0x57c51a00, 0x57c51a30, 0x57c51a60 } -Next token is token 'a' (0x57c51a90 'a') -Shifting token 'a' (0x57c51a90 'a') +0x570dca90->Object::Object { 0x570dca00, 0x570dca30, 0x570dca60 } +Next token is token 'a' (0x570dca90 'a') +Shifting token 'a' (0x570dca90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a90 'a') --> $$ = nterm item (0x57c51a90 'a') + $1 = token 'a' (0x570dca90 'a') +-> $$ = nterm item (0x570dca90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57c51ac0->Object::Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90 } -Next token is token 'p' (0x57c51ac0 'p'Exception caught: cleaning lookahead and stack -0x57c51ac0->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90, 0x57c51ac0 } -0x57c51a90->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90 } -0x57c51a60->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60 } -0x57c51a30->Object::~Object { 0x57c51a00, 0x57c51a30 } -0x57c51a00->Object::~Object { 0x57c51a00 } +0x570dcac0->Object::Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90 } +Next token is token 'p' (0x570dcac0 'p'Exception caught: cleaning lookahead and stack +0x570dcac0->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90, 0x570dcac0 } +0x570dca90->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90 } +0x570dca60->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60 } +0x570dca30->Object::~Object { 0x570dca00, 0x570dca30 } +0x570dca00->Object::~Object { 0x570dca00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -Modern C++: 201703 
-./c++.at:567: $PREPARSER ./list stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57c51a00->Object::Object { } -Next token is token 'a' (0x57c51a00 'a') -Shifting token 'a' (0x57c51a00 'a') +0x570dca00->Object::Object { } +Next token is token 'a' (0x570dca00 'a') +Shifting token 'a' (0x570dca00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a00 'a') --> $$ = nterm item (0x57c51a00 'a') + $1 = token 'a' (0x570dca00 'a') +-> $$ = nterm item (0x570dca00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57c51a30->Object::Object { 0x57c51a00 } -Next token is token 'a' (0x57c51a30 'a') -Shifting token 'a' (0x57c51a30 'a') +0x570dca30->Object::Object { 0x570dca00 } +Next token is token 'a' (0x570dca30 'a') +Shifting token 'a' (0x570dca30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a30 'a') --> $$ = nterm item (0x57c51a30 'a') + $1 = token 'a' (0x570dca30 'a') +-> $$ = nterm item (0x570dca30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57c51a60->Object::Object { 0x57c51a00, 0x57c51a30 } -Next token is token 'a' (0x57c51a60 'a') -Shifting token 'a' (0x57c51a60 'a') +0x570dca60->Object::Object { 0x570dca00, 0x570dca30 } +Next token is token 'a' (0x570dca60 'a') +Shifting token 'a' (0x570dca60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a60 'a') --> $$ = nterm item (0x57c51a60 'a') + $1 = token 'a' (0x570dca60 'a') +-> $$ = nterm item (0x570dca60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57c51a90->Object::Object { 0x57c51a00, 0x57c51a30, 0x57c51a60 } -Next token is token 'a' (0x57c51a90 'a') -Shifting token 'a' (0x57c51a90 'a') +0x570dca90->Object::Object { 0x570dca00, 0x570dca30, 0x570dca60 } +Next token is token 'a' (0x570dca90 'a') +Shifting token 'a' (0x570dca90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57c51a90 'a') --> $$ = nterm item (0x57c51a90 'a') + $1 = token 'a' (0x570dca90 'a') +-> $$ = nterm item (0x570dca90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57c51ac0->Object::Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90 } -Next token is token 'p' (0x57c51ac0 'p'Exception caught: cleaning lookahead and stack -0x57c51ac0->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90, 0x57c51ac0 } -0x57c51a90->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60, 0x57c51a90 } -0x57c51a60->Object::~Object { 0x57c51a00, 0x57c51a30, 0x57c51a60 } -0x57c51a30->Object::~Object { 0x57c51a00, 0x57c51a30 } -0x57c51a00->Object::~Object { 0x57c51a00 } +0x570dcac0->Object::Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90 } +Next token is token 'p' (0x570dcac0 'p'Exception caught: cleaning lookahead and stack +0x570dcac0->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90, 0x570dcac0 } +0x570dca90->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60, 0x570dca90 } +0x570dca60->Object::~Object { 0x570dca00, 0x570dca30, 0x570dca60 } +0x570dca30->Object::~Object { 0x570dca00, 0x570dca30 } +0x570dca00->Object::~Object { 0x570dca00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr @@ -254177,340 +256696,250 @@ exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: 
(0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -stderr: exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:1361: $PREPARSER ./input aaaaE stderr: -stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1362: $PREPARSER ./input aaaas stderr: ./c++.at:1361: $PREPARSER ./input aaaaT stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy +stdout: +./c++.at:1362: $PREPARSER ./input aaaas ./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./c++.at:1362: $PREPARSER ./input aaaal ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input i stderr: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaap -./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -stderr: -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0xfffbd65f->Object::Object { } -0xfffbd708->Object::Object { 0xfffbd65f } -0xfffbd65f->Object::~Object { 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0xfffbd708 } -0xfffbd5fb->Object::Object { 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63c4->Object::Object { 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0xfffbd670 } +0xffa9f30f->Object::Object { } +0xffa9f3b8->Object::Object { 0xffa9f30f } +0xffa9f30f->Object::~Object { 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') 
+0xffa9f308->Object::Object { 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933c4->Object::Object { 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0xffa9f308 } Entering state 2 Stack now 0 2 -0xfffbd718->Object::Object { 0x579e63c4 } +0xffa9f3c8->Object::Object { 0x565933c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63c4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63c4->Object::~Object { 0x579e63c4, 0xfffbd718 } -0x579e63c4->Object::Object { 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0xfffbd718 } + $1 = token 'a' (0x565933c4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933c4->Object::~Object { 0x565933c4, 0xffa9f3c8 } +0x565933c4->Object::Object { 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4 } -0xfffbd708->Object::Object { 0x579e63c4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63d4->Object::Object { 0x579e63c4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933d4->Object::Object { 0x565933c4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f308 } Entering state 2 Stack now 0 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63d4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63d4->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0x579e63d4->Object::Object { 0x579e63c4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } + $1 = token 'a' (0x565933d4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933d4->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } +0x565933d4->Object::Object { 0x565933c4, 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 
0x579e63d4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63e4->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933e4->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308 } Entering state 2 Stack now 0 11 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63e4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63e4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0x579e63e4->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } + $1 = token 'a' (0x565933e4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933e4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } +0x565933e4->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63f4->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0x565933d4, 
0x565933e4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933f4->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f308 } Entering state 2 Stack now 0 11 11 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63f4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63f4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } -0x579e63f4->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } + $1 = token 'a' (0x565933f4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933f4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3c8 } +0x565933f4->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'p' (0xfffbd708 'p'Exception caught: cleaning lookahead and stack -0x579e63f4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd708 } -0x579e63e4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd708 } -0x579e63d4->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd708 } -0x579e63c4->Object::~Object { 0x579e63c4, 0xfffbd708 } -0xfffbd708->Object::~Object { 0xfffbd708 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'p' (0xffa9f3b8 'p'Exception caught: cleaning lookahead and stack +0x565933f4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3b8 } +0x565933e4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3b8 } +0x565933d4->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3b8 } +0x565933c4->Object::~Object { 0x565933c4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0xffa9f3b8 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:850: $PREPARSER ./input -stderr: stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0xfffbd65f->Object::Object { } -0xfffbd708->Object::Object { 0xfffbd65f } -0xfffbd65f->Object::~Object { 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0xfffbd708 } -0xfffbd5fb->Object::Object { 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } 
-0xfffbd708->Object::~Object { 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63c4->Object::Object { 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0xfffbd670 } +0xffa9f30f->Object::Object { } +0xffa9f3b8->Object::Object { 0xffa9f30f } +0xffa9f30f->Object::~Object { 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933c4->Object::Object { 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0xffa9f308 } Entering state 2 Stack now 0 2 -0xfffbd718->Object::Object { 0x579e63c4 } +0xffa9f3c8->Object::Object { 0x565933c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63c4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63c4->Object::~Object { 0x579e63c4, 0xfffbd718 } -0x579e63c4->Object::Object { 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0xfffbd718 } + $1 = token 'a' (0x565933c4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933c4->Object::~Object { 0x565933c4, 0xffa9f3c8 } +0x565933c4->Object::Object { 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4 } -0xfffbd708->Object::Object { 0x579e63c4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63d4->Object::Object { 0x579e63c4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933d4->Object::Object { 0x565933c4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f308 } Entering state 2 Stack now 0 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63d4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63d4->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0x579e63d4->Object::Object { 0x579e63c4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } + $1 = token 'a' (0x565933d4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933d4->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } +0x565933d4->Object::Object { 0x565933c4, 
0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63e4->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933e4->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308 } Entering state 2 Stack now 0 11 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63e4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63e4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0x579e63e4->Object::Object { 0x579e63c4, 0x579e63d4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } + $1 = token 'a' (0x565933e4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933e4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } +0x565933e4->Object::Object { 0x565933c4, 0x565933d4, 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'a' (0xfffbd708 'a') -0xfffbd670->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd708 } -0xfffbd5fb->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670, 0xfffbd708 } -0xfffbd5fb->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd5fb, 0xfffbd670, 0xfffbd708 } -0xfffbd708->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670, 0xfffbd708 } -Shifting token 'a' (0xfffbd670 'a') -0x579e63f4->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd670 } -0xfffbd5ff->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd670 } -0xfffbd5ff->Object::~Object { 0x579e63c4, 
0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd5ff, 0xfffbd670 } -0xfffbd670->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd670 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'a' (0xffa9f3b8 'a') +0xffa9f308->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308, 0xffa9f3b8 } +Shifting token 'a' (0xffa9f308 'a') +0x565933f4->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f308 } +0xffa9f308->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f308 } Entering state 2 Stack now 0 11 11 11 2 -0xfffbd718->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4 } +0xffa9f3c8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579e63f4 'a') --> $$ = nterm item (0xfffbd718 'a') -0x579e63f4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } -0x579e63f4->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd718 } -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f, 0xfffbd718 } -0xfffbd718->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd718 } + $1 = token 'a' (0x565933f4 'a') +-> $$ = nterm item (0xffa9f3c8 'a') +0x565933f4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3c8 } +0x565933f4->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3c8 } +0xffa9f3c8->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3c8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfffbd65f->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4 } -0xfffbd708->Object::Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f } -0xfffbd65f->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd65f, 0xfffbd708 } -Next token is token 'p' (0xfffbd708 'p'Exception caught: cleaning lookahead and stack -0x579e63f4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0x579e63f4, 0xfffbd708 } -0x579e63e4->Object::~Object { 0x579e63c4, 0x579e63d4, 0x579e63e4, 0xfffbd708 } -0x579e63d4->Object::~Object { 0x579e63c4, 0x579e63d4, 0xfffbd708 } -0x579e63c4->Object::~Object { 0x579e63c4, 0xfffbd708 } -0xfffbd708->Object::~Object { 0xfffbd708 } +0xffa9f30f->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4 } +0xffa9f3b8->Object::Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f30f } +0xffa9f30f->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f30f, 0xffa9f3b8 } +Next token is token 'p' (0xffa9f3b8 'p'Exception caught: cleaning lookahead and stack +0x565933f4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0x565933f4, 0xffa9f3b8 } +0x565933e4->Object::~Object { 0x565933c4, 0x565933d4, 0x565933e4, 0xffa9f3b8 } +0x565933d4->Object::~Object { 0x565933c4, 0x565933d4, 0xffa9f3b8 } +0x565933c4->Object::~Object { 0x565933c4, 0xffa9f3b8 } +0xffa9f3b8->Object::~Object { 0xffa9f3b8 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o input input.cc $LIBS exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./c++.at:1362: $PREPARSER ./input aaaaE stderr: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaT @@ -254518,160 +256947,110 @@ ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaR stderr: -stderr: -stdout: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +678. c++.at:848: ok + + stderr: -stderr: -stdout: stdout: -./c++.at:571: $here/modern ======== Testing with C++ standard flags: '' -stdout: ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -Modern C++: 201703 -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:569: $here/modern -stdout: -Modern C++: 201703 -./c++.at:569: $PREPARSER ./list +./c++.at:1555: $PREPARSER ./test stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +692. java.at:25: testing Java invalid directives ... 
+./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y +./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y +692. java.at:25: ok + +693. java.at:186: testing Java parser class and package names ... +./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:188: grep '[mb]4_' YYParser.y stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list +693. java.at:186: skipped (java.at:188) + +694. java.at:217: testing Java parser class modifiers ... +./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:219: grep '[mb]4_' YYParser.y +stdout: +694. java.at:217: skipped (java.at:219) stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + +stdout: +./c++.at:858: $PREPARSER ./input +695. java.at:287: testing Java parser class extends and implements ... +./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./java.at:289: grep '[mb]4_' YYParser.y stdout: -./c++.at:566: $here/modern +695. java.at:287: skipped (java.at:289) + +696. java.at:307: testing Java %parse-param and %lex-param ... +./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:309: grep '[mb]4_' YYParser.y +stdout: +696. java.at:307: skipped (java.at:309) + +697. java.at:381: testing Java throws specifications ... +./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:441: grep '[mb]4_' YYParser.y +stdout: +697. 
java.at:381: stderr: stdout: -Modern C++: 202002 -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + skipped (java.at:441) + +698. java.at:470: testing Java constructor init and init_throws ... +./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:475: grep '[mb]4_' YYParser.y stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +698. java.at:470: skipped (java.at:475) + +699. java.at:497: testing Java value, position, and location types ... +./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:499: grep '[mb]4_' YYParser.y stdout: -./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS +699. java.at:497: skipped (java.at:499) + +700. java.at:528: testing Java syntax error handling without error token ... +./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y +700. java.at:528: skipped (java.at:580) + +701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... +./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y +./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java +./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y stderr: stdout: ./c++.at:1066: $PREPARSER ./input < in +702. javapush.at:217: testing Trivial Push Parser with %initial-action ... 
+./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y stderr: error: invalid expression caught error error: invalid character caught error ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java ./c++.at:1066: $PREPARSER ./input < in +./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y stderr: error: invalid expression ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -254679,42 +257058,17 @@ stderr: error: invalid character ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:856: $PREPARSER ./input -======== Testing with C++ standard flags: '' -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -664. c++.at:107: ok -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java +702. javapush.at:217: skipped (javapush.at:230) +./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java + +./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +701. javapush.at:172: skipped (javapush.at:207) stderr: stdout: +703. d.at:103: testing D parser class extends and implements ... +./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction @@ -254726,1292 +257080,844 @@ ./c++.at:1360: $PREPARSER ./input i stderr: exception caught: initial-action +704. d.at:138: testing D parser class api.token.raw true by default ... +./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaap stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -691. c++.at:1517: testing Default action ... 
-======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:573: $here/modern -stdout: ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -Modern C++: 201703 Starting parse Entering state 0 Stack now 0 Reading a token -0x56fd1a00->Object::Object { } -Next token is token 'a' (0x56fd1a00 'a') -Shifting token 'a' (0x56fd1a00 'a') +0x5756fa00->Object::Object { } +Next token is token 'a' (0x5756fa00 'a') +Shifting token 'a' (0x5756fa00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a00 'a') --> $$ = nterm item (0x56fd1a00 'a') + $1 = token 'a' (0x5756fa00 'a') +-> $$ = nterm item (0x5756fa00 'a') Entering state 11 Stack now 0 11 Reading a token -0x56fd1a30->Object::Object { 0x56fd1a00 } -Next token is token 'a' (0x56fd1a30 'a') -Shifting token 'a' (0x56fd1a30 'a') +0x5756fa30->Object::Object { 0x5756fa00 } +Next token is token 'a' (0x5756fa30 'a') +Shifting token 'a' (0x5756fa30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a30 'a') --> $$ = nterm item (0x56fd1a30 'a') + $1 = token 'a' (0x5756fa30 'a') +-> $$ = nterm item (0x5756fa30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x56fd1a60->Object::Object { 0x56fd1a00, 0x56fd1a30 } -Next token is token 'a' (0x56fd1a60 'a') -Shifting token 'a' (0x56fd1a60 'a') +0x5756fa60->Object::Object { 0x5756fa00, 0x5756fa30 } +Next token is token 'a' (0x5756fa60 'a') +Shifting token 'a' (0x5756fa60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a60 'a') --> $$ = nterm item (0x56fd1a60 'a') + $1 = token 'a' (0x5756fa60 'a') +-> $$ = nterm item (0x5756fa60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x56fd1a90->Object::Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60 } -Next token is token 'a' (0x56fd1a90 'a') -Shifting token 'a' (0x56fd1a90 'a') +0x5756fa90->Object::Object { 0x5756fa00, 0x5756fa30, 0x5756fa60 } +Next token is token 'a' (0x5756fa90 'a') +Shifting token 'a' (0x5756fa90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a90 'a') --> $$ = nterm item (0x56fd1a90 'a') + $1 = token 'a' (0x5756fa90 'a') +-> $$ = nterm item (0x5756fa90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x56fd1ac0->Object::Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90 } -Next token is token 'p' (0x56fd1ac0 'p'Exception caught: cleaning lookahead and stack -0x56fd1ac0->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90, 0x56fd1ac0 } -0x56fd1a90->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90 } -0x56fd1a60->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60 } -0x56fd1a30->Object::~Object { 0x56fd1a00, 0x56fd1a30 } -0x56fd1a00->Object::~Object { 0x56fd1a00 } +0x5756fac0->Object::Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90 } +Next token is token 'p' (0x5756fac0 'p'Exception caught: cleaning lookahead and stack +0x5756fac0->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90, 0x5756fac0 } +0x5756fa90->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90 } +0x5756fa60->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60 } +0x5756fa30->Object::~Object { 0x5756fa00, 0x5756fa30 } +0x5756fa00->Object::~Object { 0x5756fa00 } exception caught: printer end { } -./c++.at:573: $PREPARSER ./list ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
stderr: -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) Starting parse Entering state 0 Stack now 0 Reading a token -0x56fd1a00->Object::Object { } -Next token is token 'a' (0x56fd1a00 'a') -Shifting token 'a' (0x56fd1a00 'a') +0x5756fa00->Object::Object { } +Next token is token 'a' (0x5756fa00 'a') +Shifting token 'a' (0x5756fa00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a00 'a') --> $$ = nterm item (0x56fd1a00 'a') + $1 = token 'a' (0x5756fa00 'a') +-> $$ = nterm item (0x5756fa00 'a') Entering state 11 Stack now 0 11 Reading a token -0x56fd1a30->Object::Object { 0x56fd1a00 } -Next token is token 'a' (0x56fd1a30 'a') -Shifting token 'a' (0x56fd1a30 'a') +0x5756fa30->Object::Object { 0x5756fa00 } +Next token is token 'a' (0x5756fa30 'a') +Shifting token 'a' (0x5756fa30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a30 'a') --> $$ = nterm item (0x56fd1a30 'a') + $1 = token 'a' (0x5756fa30 'a') +-> $$ = nterm item (0x5756fa30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x56fd1a60->Object::Object { 0x56fd1a00, 0x56fd1a30 } -Next token is token 'a' (0x56fd1a60 'a') -Shifting token 'a' (0x56fd1a60 'a') +0x5756fa60->Object::Object { 0x5756fa00, 0x5756fa30 } +Next token is token 'a' (0x5756fa60 'a') +Shifting token 'a' (0x5756fa60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a60 'a') --> $$ = nterm item (0x56fd1a60 'a') + $1 = token 'a' (0x5756fa60 'a') +-> $$ = nterm item (0x5756fa60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x56fd1a90->Object::Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60 } -Next token is token 'a' (0x56fd1a90 'a') -Shifting token 'a' (0x56fd1a90 'a') +0x5756fa90->Object::Object { 0x5756fa00, 0x5756fa30, 0x5756fa60 } +Next token is token 'a' (0x5756fa90 'a') +Shifting token 'a' (0x5756fa90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56fd1a90 'a') --> $$ = nterm item (0x56fd1a90 'a') + $1 = token 'a' (0x5756fa90 'a') +-> $$ = nterm item (0x5756fa90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x56fd1ac0->Object::Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90 } -Next token is token 'p' (0x56fd1ac0 'p'Exception caught: cleaning lookahead and stack -0x56fd1ac0->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90, 0x56fd1ac0 } -0x56fd1a90->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60, 0x56fd1a90 } -0x56fd1a60->Object::~Object { 0x56fd1a00, 0x56fd1a30, 0x56fd1a60 } -0x56fd1a30->Object::~Object { 0x56fd1a00, 0x56fd1a30 } -0x56fd1a00->Object::~Object { 0x56fd1a00 } +0x5756fac0->Object::Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90 } +Next token is token 'p' (0x5756fac0 'p'Exception caught: cleaning lookahead and stack +0x5756fac0->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90, 0x5756fac0 } +0x5756fa90->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60, 0x5756fa90 } +0x5756fa60->Object::~Object { 0x5756fa00, 0x5756fa30, 0x5756fa60 } +0x5756fa30->Object::~Object { 0x5756fa00, 0x5756fa30 } +0x5756fa00->Object::~Object { 0x5756fa00 } exception caught: printer 
end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./d.at:106: grep '[mb]4_' YYParser.y +./d.at:141: grep '[mb]4_' YYParser.y stderr: exception caught: syntax error +stdout: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -stderr: stdout: +703. d.at:103: 704. d.at:138: ./c++.at:1360: $PREPARSER ./input aaaaE + skipped (d.at:106) + skipped (d.at:141) +stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: ./check + +./c++.at:1360: $PREPARSER ./input aaaaT + +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... +./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... +./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +stdout: +./c++.at:1555: $PREPARSER ./test +stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y stdout: ./c++.at:1363: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1360: $PREPARSER ./input aaaaR ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1363: $PREPARSER ./input i -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test 
test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' exception caught: initial-action -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaap stderr: -stdout: -stderr: -./c++.at:1502: $PREPARSER ./parser ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff98c4df->Object::Object { } -0xff98c588->Object::Object { 0xff98c4df } -0xff98c4df->Object::~Object { 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0xff98c588 } -0xff98c47b->Object::Object { 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3c4->Object::Object { 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { } +0xffb0cdf8->Object::Object { 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623c4->Object::Object { 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0xffb0cd38 } Entering state 1 Stack now 0 1 -0xff98c598->Object::Object { 0x56eed3c4 } +0xffb0ce08->Object::Object { 0x57f623c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3c4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3c4->Object::~Object { 0x56eed3c4, 0xff98c598 } -0x56eed3c4->Object::Object { 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0xff98c598 } + $1 = token 'a' (0x57f623c4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623c4->Object::~Object { 0x57f623c4, 0xffb0ce08 } +0x57f623c4->Object::Object { 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0xffb0ce08 } Entering state 10 Stack now 0 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4 } -0xff98c588->Object::Object { 0x56eed3c4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3d4->Object::Object { 0x56eed3c4, 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0xffb0cdf8 } 
+0xffb0cdf8->Object::~Object { 0x57f623c4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623d4->Object::Object { 0x57f623c4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38 } Entering state 1 Stack now 0 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3d4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3d4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0x56eed3d4->Object::Object { 0x56eed3c4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } + $1 = token 'a' (0x57f623d4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623d4->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } +0x57f623d4->Object::Object { 0x57f623c4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3e4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623e4->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38 } Entering state 1 Stack now 0 10 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3e4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3e4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } -0x56eed3e4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } + $1 = token 'a' (0x57f623e4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623e4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } +0x57f623e4->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 
0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3f4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623f4->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd38 } Entering state 1 Stack now 0 10 10 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3f4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3f4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } -0x56eed3f4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } + $1 = token 'a' (0x57f623f4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623f4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0ce08 } +0x57f623f4->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4df, 0xff98c588 } -Next token is token 'p' (0xff98c588 'p'Exception caught: cleaning lookahead and stack -0x56eed3f4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c588 } -0x56eed3e4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c588 } -0x56eed3d4->Object::~Object { 0x56eed3c4, 
0x56eed3d4, 0xff98c588 } -0x56eed3c4->Object::~Object { 0x56eed3c4, 0xff98c588 } -0xff98c588->Object::~Object { 0xff98c588 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'p' (0xffb0cdf8 'p'Exception caught: cleaning lookahead and stack +0x57f623f4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cdf8 } +0x57f623e4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cdf8 } +0x57f623d4->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cdf8 } +0x57f623c4->Object::~Object { 0x57f623c4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0xffb0cdf8 } exception caught: printer end { } -690. c++.at:1422: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff98c4df->Object::Object { } -0xff98c588->Object::Object { 0xff98c4df } -0xff98c4df->Object::~Object { 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0xff98c588 } -0xff98c47b->Object::Object { 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3c4->Object::Object { 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { } +0xffb0cdf8->Object::Object { 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623c4->Object::Object { 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0xffb0cd38 } Entering state 1 Stack now 0 1 -0xff98c598->Object::Object { 0x56eed3c4 } +0xffb0ce08->Object::Object { 0x57f623c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3c4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3c4->Object::~Object { 0x56eed3c4, 0xff98c598 } -0x56eed3c4->Object::Object { 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0xff98c598 } + $1 = token 'a' (0x57f623c4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623c4->Object::~Object { 0x57f623c4, 0xffb0ce08 } +0x57f623c4->Object::Object { 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0xffb0ce08 } Entering state 10 Stack now 0 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4 } -0xff98c588->Object::Object { 0x56eed3c4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3d4->Object::Object { 0x56eed3c4, 0xff98c4f0 } -0xff98c47f->Object::Object { 
0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0x57f623c4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623d4->Object::Object { 0x57f623c4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38 } Entering state 1 Stack now 0 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3d4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3d4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0x56eed3d4->Object::Object { 0x56eed3c4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } + $1 = token 'a' (0x57f623d4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623d4->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } +0x57f623d4->Object::Object { 0x57f623c4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3e4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623e4->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38 } Entering state 1 Stack now 0 10 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3e4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3e4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } -0x56eed3e4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 
0x56eed3e4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } + $1 = token 'a' (0x57f623e4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623e4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } +0x57f623e4->Object::Object { 0x57f623c4, 0x57f623d4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4df, 0xff98c588 } -Next token is token 'a' (0xff98c588 'a') -0xff98c4f0->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c588 } -0xff98c47b->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0, 0xff98c588 } -0xff98c47b->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c47b, 0xff98c4f0, 0xff98c588 } -0xff98c588->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0, 0xff98c588 } -Shifting token 'a' (0xff98c4f0 'a') -0x56eed3f4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c4f0 } -0xff98c47f->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4f0 } -0xff98c47f->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c47f, 0xff98c4f0 } -0xff98c4f0->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4f0 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'a' (0xffb0cdf8 'a') +0xffb0cd38->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38, 0xffb0cdf8 } +Shifting token 'a' (0xffb0cd38 'a') +0x57f623f4->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cd38 } +0xffb0cd38->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd38 } Entering state 1 Stack now 0 10 10 10 1 -0xff98c598->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4 } +0xffb0ce08->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56eed3f4 'a') --> $$ = nterm item (0xff98c598 'a') -0x56eed3f4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } -0x56eed3f4->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c598 } -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4df, 0xff98c598 } -0xff98c598->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c598 } + $1 = token 'a' (0x57f623f4 'a') +-> $$ = nterm item (0xffb0ce08 'a') +0x57f623f4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0ce08 } +0x57f623f4->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0ce08 } +0xffb0ce08->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0ce08 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff98c4df->Object::Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4 } -0xff98c588->Object::Object { 0x56eed3c4, 0x56eed3d4, 
0x56eed3e4, 0x56eed3f4, 0xff98c4df } -0xff98c4df->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c4df, 0xff98c588 } -Next token is token 'p' (0xff98c588 'p'Exception caught: cleaning lookahead and stack -0x56eed3f4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0x56eed3f4, 0xff98c588 } -0x56eed3e4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0x56eed3e4, 0xff98c588 } -0x56eed3d4->Object::~Object { 0x56eed3c4, 0x56eed3d4, 0xff98c588 } -0x56eed3c4->Object::~Object { 0x56eed3c4, 0xff98c588 } -0xff98c588->Object::~Object { 0xff98c588 } +0xffb0cd3f->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4 } +0xffb0cdf8->Object::Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd3f } +0xffb0cd3f->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cd3f, 0xffb0cdf8 } +Next token is token 'p' (0xffb0cdf8 'p'Exception caught: cleaning lookahead and stack +0x57f623f4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0x57f623f4, 0xffb0cdf8 } +0x57f623e4->Object::~Object { 0x57f623c4, 0x57f623d4, 0x57f623e4, 0xffb0cdf8 } +0x57f623d4->Object::~Object { 0x57f623c4, 0x57f623d4, 0xffb0cdf8 } +0x57f623c4->Object::~Object { 0x57f623c4, 0xffb0cdf8 } +0xffb0cdf8->Object::~Object { 0xffb0cdf8 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr +stderr: stdout: exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae stderr: +stdout: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: +stdout: +./c++.at:858: $PREPARSER ./input ./c++.at:1363: $PREPARSER ./input aaaaE stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1363: $PREPARSER ./input aaaaT stderr: - ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaR stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -692. java.at:25: testing Java invalid directives ... 
-./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y -stderr: -stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list -stderr: -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -stdout: -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y -./c++.at:1064: $PREPARSER ./input < in -stderr: -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -./c++.at:1065: $PREPARSER ./input < in -stderr: -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1065: $PREPARSER ./input < in -stderr: -682. c++.at:1064: ok -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS - -692. java.at:25: ok -683. c++.at:1065: ok - - +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -695. java.at:287: testing Java parser class extends and implements ... -./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y ./c++.at:1361: $PREPARSER ./input aaaal -693. java.at:186: testing Java parser class and package names ... -stderr: -./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stderr: -694. java.at:217: testing Java parser class modifiers ... 
-./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -stdout: exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./java.at:219: grep '[mb]4_' YYParser.y -stdout: ./c++.at:1361: $PREPARSER ./input i -694. java.at:217: stderr: +stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./java.at:188: grep '[mb]4_' YYParser.y - skipped (java.at:219) -./java.at:289: grep '[mb]4_' YYParser.y -stdout: -stdout: - ./c++.at:1361: $PREPARSER ./input aaaap -693. java.at:186: 695. java.at:287: stderr: - skipped (java.at:188) - skipped (java.at:289) +stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: -stderr: -stderr: -stdout: -stdout: - - -./c++.at:1362: $PREPARSER ./input aaaas Starting parse Entering state 0 Stack now 0 Reading a token -0x567eca00->Object::Object { } -Next token is token 'a' (0x567eca00 'a') -Shifting token 'a' (0x567eca00 'a') +0x581b5a00->Object::Object { } +Next token is token 'a' (0x581b5a00 'a') +Shifting token 'a' (0x581b5a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca00 'a') --> $$ = nterm item (0x567eca00 'a') + $1 = token 'a' (0x581b5a00 'a') +-> $$ = nterm item (0x581b5a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x567eca30->Object::Object { 0x567eca00 } -Next token is token 'a' (0x567eca30 'a') -Shifting token 'a' (0x567eca30 'a') +0x581b5a30->Object::Object { 0x581b5a00 } +Next token is token 'a' (0x581b5a30 'a') +Shifting token 'a' (0x581b5a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca30 'a') --> $$ = nterm item (0x567eca30 'a') + $1 = token 'a' (0x581b5a30 'a') +-> $$ = nterm item (0x581b5a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x567eca60->Object::Object { 0x567eca00, 0x567eca30 } -Next token is token 'a' (0x567eca60 'a') -Shifting token 'a' (0x567eca60 'a') +0x581b5a60->Object::Object { 0x581b5a00, 0x581b5a30 } +Next token is token 'a' (0x581b5a60 'a') +Shifting token 'a' (0x581b5a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca60 'a') --> $$ = nterm item (0x567eca60 'a') + $1 = token 'a' (0x581b5a60 'a') +-> $$ = nterm item (0x581b5a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x567eca90->Object::Object { 0x567eca00, 0x567eca30, 0x567eca60 } -Next token is token 'a' (0x567eca90 'a') -Shifting token 'a' (0x567eca90 'a') +0x581b5a90->Object::Object { 0x581b5a00, 0x581b5a30, 0x581b5a60 } +Next token is token 'a' (0x581b5a90 'a') +Shifting token 'a' (0x581b5a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca90 'a') --> $$ = nterm item (0x567eca90 'a') + $1 = token 'a' (0x581b5a90 'a') +-> $$ = nterm item (0x581b5a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x567ecac0->Object::Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90 } -Next token is token 'p' (0x567ecac0 'p'Exception caught: cleaning lookahead and stack -0x567ecac0->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90, 0x567ecac0 } -0x567eca90->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90 
} -0x567eca60->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60 } -0x567eca30->Object::~Object { 0x567eca00, 0x567eca30 } -0x567eca00->Object::~Object { 0x567eca00 } +0x581b5ac0->Object::Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90 } +Next token is token 'p' (0x581b5ac0 'p'Exception caught: cleaning lookahead and stack +0x581b5ac0->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90, 0x581b5ac0 } +0x581b5a90->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90 } +0x581b5a60->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60 } +0x581b5a30->Object::~Object { 0x581b5a00, 0x581b5a30 } +0x581b5a00->Object::~Object { 0x581b5a00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x567eca00->Object::Object { } -Next token is token 'a' (0x567eca00 'a') -Shifting token 'a' (0x567eca00 'a') +0x581b5a00->Object::Object { } +Next token is token 'a' (0x581b5a00 'a') +Shifting token 'a' (0x581b5a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca00 'a') --> $$ = nterm item (0x567eca00 'a') + $1 = token 'a' (0x581b5a00 'a') +-> $$ = nterm item (0x581b5a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x567eca30->Object::Object { 0x567eca00 } -Next token is token 'a' (0x567eca30 'a') -Shifting token 'a' (0x567eca30 'a') +0x581b5a30->Object::Object { 0x581b5a00 } +Next token is token 'a' (0x581b5a30 'a') +Shifting token 'a' (0x581b5a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca30 'a') --> $$ = nterm item (0x567eca30 'a') + $1 = token 'a' (0x581b5a30 'a') +-> $$ = nterm item (0x581b5a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x567eca60->Object::Object { 0x567eca00, 0x567eca30 } -Next token is token 'a' (0x567eca60 'a') -Shifting token 'a' (0x567eca60 'a') +0x581b5a60->Object::Object { 0x581b5a00, 0x581b5a30 } +Next token is token 'a' (0x581b5a60 'a') +Shifting token 'a' (0x581b5a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca60 'a') --> $$ = nterm item (0x567eca60 'a') + $1 = token 'a' (0x581b5a60 'a') +-> $$ = nterm item (0x581b5a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x567eca90->Object::Object { 0x567eca00, 0x567eca30, 0x567eca60 } -Next token is token 'a' (0x567eca90 'a') -Shifting token 'a' (0x567eca90 'a') +0x581b5a90->Object::Object { 0x581b5a00, 0x581b5a30, 0x581b5a60 } +Next token is token 'a' (0x581b5a90 'a') +Shifting token 'a' (0x581b5a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x567eca90 'a') --> $$ = nterm item (0x567eca90 'a') + $1 = token 'a' (0x581b5a90 'a') +-> $$ = nterm item (0x581b5a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x567ecac0->Object::Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90 } -Next token is token 'p' (0x567ecac0 'p'Exception caught: cleaning lookahead and stack -0x567ecac0->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90, 0x567ecac0 } -0x567eca90->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60, 0x567eca90 } 
-0x567eca60->Object::~Object { 0x567eca00, 0x567eca30, 0x567eca60 } -0x567eca30->Object::~Object { 0x567eca00, 0x567eca30 } -0x567eca00->Object::~Object { 0x567eca00 } +0x581b5ac0->Object::Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90 } +Next token is token 'p' (0x581b5ac0 'p'Exception caught: cleaning lookahead and stack +0x581b5ac0->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90, 0x581b5ac0 } +0x581b5a90->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60, 0x581b5a90 } +0x581b5a60->Object::~Object { 0x581b5a00, 0x581b5a30, 0x581b5a60 } +0x581b5a30->Object::~Object { 0x581b5a00, 0x581b5a30 } +0x581b5a00->Object::~Object { 0x581b5a00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr stdout: exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae -./c++.at:1362: $PREPARSER ./input aaaal stderr: exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: $PREPARSER ./input i ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -696. java.at:307: testing Java %parse-param and %lex-param ... -exception caught: initial-action -./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./c++.at:1361: $PREPARSER ./input aaaaT -./c++.at:1362: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -697. java.at:381: testing Java throws specifications ... +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -./c++.at:1362: $PREPARSER ./input --debug aaaap -./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:1362: $PREPARSER ./input aaaal stderr: -./java.at:309: grep '[mb]4_' YYParser.y -698. java.at:470: testing Java constructor init and init_throws ... 
-./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0xfff1a75f->Object::Object { } -0xfff1a808->Object::Object { 0xfff1a75f } -0xfff1a75f->Object::~Object { 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0xfff1a808 } -0xfff1a6fb->Object::Object { 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913c4->Object::Object { 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0xfff1a770 } +0xffde956f->Object::Object { } +0xffde9618->Object::Object { 0xffde956f } +0xffde956f->Object::~Object { 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0xffde9618 } +0xffde9618->Object::~Object { 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093c4->Object::Object { 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0xffde9568 } Entering state 2 Stack now 0 2 -0xfff1a818->Object::Object { 0x568913c4 } +0xffde9628->Object::Object { 0x581093c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913c4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913c4->Object::~Object { 0x568913c4, 0xfff1a818 } -0x568913c4->Object::Object { 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0xfff1a818 } + $1 = token 'a' (0x581093c4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093c4->Object::~Object { 0x581093c4, 0xffde9628 } +0x581093c4->Object::Object { 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0xffde9628 } Entering state 11 Stack now 0 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4 } -0xfff1a808->Object::Object { 0x568913c4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0x568913c4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913d4->Object::Object { 0x568913c4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4 } +0xffde9618->Object::Object { 0x581093c4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0xffde956f, 0xffde9618 } +Next token 
is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093d4->Object::Object { 0x581093c4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9568 } Entering state 2 Stack now 0 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913d4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913d4->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0x568913d4->Object::Object { 0x568913c4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a818 } + $1 = token 'a' (0x581093d4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093d4->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9628 } +0x581093d4->Object::Object { 0x581093c4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9628 } Entering state 11 Stack now 0 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913e4->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0x581093d4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093e4->Object::Object { 0x581093c4, 0x581093d4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568 } Entering state 2 Stack now 0 11 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913e4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913e4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0x568913e4->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } + $1 = token 'a' (0x581093e4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093e4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } 
+0x581093e4->Object::Object { 0x581093c4, 0x581093d4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913f4->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093f4->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9568 } Entering state 2 Stack now 0 11 11 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913f4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913f4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } -0x568913f4->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } + $1 = token 'a' (0x581093f4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093f4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9628 } +0x581093f4->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9628 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'p' (0xfff1a808 'p'Exception caught: cleaning lookahead and stack -0x568913f4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a808 } -0x568913e4->Object::~Object { 
0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a808 } -0x568913d4->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a808 } -0x568913c4->Object::~Object { 0x568913c4, 0xfff1a808 } -0xfff1a808->Object::~Object { 0xfff1a808 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde956f, 0xffde9618 } +Next token is token 'p' (0xffde9618 'p'Exception caught: cleaning lookahead and stack +0x581093f4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9618 } +0x581093e4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9618 } +0x581093d4->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9618 } +0x581093c4->Object::~Object { 0x581093c4, 0xffde9618 } +0xffde9618->Object::~Object { 0xffde9618 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -696. java.at:307: ./java.at:475: grep '[mb]4_' YYParser.y Starting parse Entering state 0 Stack now 0 Reading a token -0xfff1a75f->Object::Object { } -0xfff1a808->Object::Object { 0xfff1a75f } -0xfff1a75f->Object::~Object { 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0xfff1a808 } -0xfff1a6fb->Object::Object { 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913c4->Object::Object { 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0xfff1a770 } +0xffde956f->Object::Object { } +0xffde9618->Object::Object { 0xffde956f } +0xffde956f->Object::~Object { 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0xffde9618 } +0xffde9618->Object::~Object { 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093c4->Object::Object { 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0xffde9568 } Entering state 2 Stack now 0 2 -0xfff1a818->Object::Object { 0x568913c4 } +0xffde9628->Object::Object { 0x581093c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913c4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913c4->Object::~Object { 0x568913c4, 0xfff1a818 } -0x568913c4->Object::Object { 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0xfff1a818 } + $1 = token 'a' (0x581093c4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093c4->Object::~Object { 0x581093c4, 0xffde9628 } +0x581093c4->Object::Object { 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0xffde9628 } Entering state 11 Stack now 0 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4 } -0xfff1a808->Object::Object { 0x568913c4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } 
-0xfff1a808->Object::~Object { 0x568913c4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913d4->Object::Object { 0x568913c4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4 } +0xffde9618->Object::Object { 0x581093c4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093d4->Object::Object { 0x581093c4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9568 } Entering state 2 Stack now 0 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913d4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913d4->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0x568913d4->Object::Object { 0x568913c4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a818 } + $1 = token 'a' (0x581093d4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093d4->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9628 } +0x581093d4->Object::Object { 0x581093c4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9628 } Entering state 11 Stack now 0 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913e4->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0x581093d4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093e4->Object::Object { 0x581093c4, 0x581093d4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568 } Entering state 2 Stack now 0 11 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913e4 'a') --> $$ = nterm item (0xfff1a818 'a') 
-0x568913e4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0x568913e4->Object::Object { 0x568913c4, 0x568913d4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } + $1 = token 'a' (0x581093e4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093e4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } +0x581093e4->Object::Object { 0x581093c4, 0x581093d4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'a' (0xfff1a808 'a') -0xfff1a770->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a808 } -0xfff1a6fb->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770, 0xfff1a808 } -0xfff1a6fb->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a6fb, 0xfff1a770, 0xfff1a808 } -0xfff1a808->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770, 0xfff1a808 } -Shifting token 'a' (0xfff1a770 'a') -0x568913f4->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a770 } -0xfff1a6ff->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a770 } -0xfff1a6ff->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a6ff, 0xfff1a770 } -0xfff1a770->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a770 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde956f, 0xffde9618 } +Next token is token 'a' (0xffde9618 'a') +0xffde9568->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9618 } +0xffde9618->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568, 0xffde9618 } +Shifting token 'a' (0xffde9568 'a') +0x581093f4->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9568 } +0xffde9568->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9568 } Entering state 2 Stack now 0 11 11 11 2 -0xfff1a818->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4 } +0xffde9628->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x568913f4 'a') --> $$ = nterm item (0xfff1a818 'a') -0x568913f4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } -0x568913f4->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a818 } -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f, 0xfff1a818 } -0xfff1a818->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a818 } + $1 = token 'a' (0x581093f4 'a') +-> $$ = nterm item (0xffde9628 'a') +0x581093f4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9628 } +0x581093f4->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9628 } +0xffde9628->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9628 
} Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfff1a75f->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4 } -0xfff1a808->Object::Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f } -0xfff1a75f->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a75f, 0xfff1a808 } -Next token is token 'p' (0xfff1a808 'p'Exception caught: cleaning lookahead and stack -0x568913f4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0x568913f4, 0xfff1a808 } -0x568913e4->Object::~Object { 0x568913c4, 0x568913d4, 0x568913e4, 0xfff1a808 } -0x568913d4->Object::~Object { 0x568913c4, 0x568913d4, 0xfff1a808 } -0x568913c4->Object::~Object { 0x568913c4, 0xfff1a808 } -0xfff1a808->Object::~Object { 0xfff1a808 } +0xffde956f->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4 } +0xffde9618->Object::Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde956f } +0xffde956f->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde956f, 0xffde9618 } +Next token is token 'p' (0xffde9618 'p'Exception caught: cleaning lookahead and stack +0x581093f4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0x581093f4, 0xffde9618 } +0x581093e4->Object::~Object { 0x581093c4, 0x581093d4, 0x581093e4, 0xffde9618 } +0x581093d4->Object::~Object { 0x581093c4, 0x581093d4, 0xffde9618 } +0x581093c4->Object::~Object { 0x581093c4, 0xffde9618 } +0xffde9618->Object::~Object { 0xffde9618 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr - skipped (java.at:309) -stdout: stdout: - exception caught: printer -698. java.at:470: ./c++.at:1362: $PREPARSER ./input aaaae - skipped (java.at:475) +./c++.at:1362: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./java.at:441: grep '[mb]4_' YYParser.y -stdout: ./c++.at:1362: $PREPARSER ./input aaaaE - -697. java.at:381: stderr: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (java.at:441) ./c++.at:1362: $PREPARSER ./input aaaaT -699. java.at:497: testing Java value, position, and location types ... stderr: -./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y - ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaR stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -700. java.at:528: testing Java syntax error handling without error token ... -./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y -./java.at:499: grep '[mb]4_' YYParser.y -stdout: -699. java.at:497: skipped (java.at:499) -700. java.at:528: skipped (java.at:580) -701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... 
-./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y - - -./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java -./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y -702. javapush.at:217: testing Trivial Push Parser with %initial-action ... -./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -703. d.at:103: testing D parser class extends and implements ... -./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java -./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -./d.at:106: grep '[mb]4_' YYParser.y -./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java -stdout: -./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -703. d.at:103: skipped (d.at:106) -stderr: -stdout: -702. javapush.at:217: ./c++.at:568: $here/modern -stdout: - skipped (javapush.at:230) -Modern C++: 202002 -./c++.at:568: $PREPARSER ./list - -./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -701. javapush.at:172: skipped (javapush.at:207) - -stderr: -stdout: -704. d.at:138: testing D parser class api.token.raw true by default ... -705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./d.at:141: grep '[mb]4_' YYParser.y -stdout: -stderr: -704. d.at:138: types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS - skipped (d.at:141) -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +684. c++.at:1066: ok -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 stderr: stdout: -./c++.at:570: $here/modern -stdout: -706. 
cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... -Modern C++: 202002 -./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:570: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: $PREPARSER ./input stderr: -stdout: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:1555: $PREPARSER ./test +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... -stderr: ./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./c++.at:567: $here/modern ./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stdout: -Modern C++: 202002 -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stdout: -./c++.at:1555: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -stderr: -stdout: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stderr: -stdout: -./c++.at:1555: ./check -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ======== Testing with C++ standard flags: '' -stdout: ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 202002 -./c++.at:571: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./cxx-type.at:412: $PREPARSER ./types test-input @@ -257155,1181 +259061,834 @@ $1 = token '@' () Cleanup: popping nterm prog () 705. cxx-type.at:409: ok -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: +./c++.at:1555: $PREPARSER ./test -./cxx-type.at:417: $PREPARSER ./types test-input stderr: -17.5: syntax error -./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:417: $PREPARSER ./types -p test-input +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... 
+stderr: +./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) -Entering state 1 -Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) -Entering state 8 -Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) -Entering state 15 -Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) -Entering state 8 +Stack now 0 Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) +0xff8a151f->Object::Object { } +0xff8a15c8->Object::Object { 0xff8a151f } +0xff8a151f->Object::~Object { 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33c4->Object::Object { 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0xff8a1518 } Entering state 1 +Stack now 0 1 +0xff8a15d8->Object::Object { 0x56ab33c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33c4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33c4->Object::~Object { 0x56ab33c4, 0xff8a15d8 } +0x56ab33c4->Object::Object { 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) -Entering state 4 -Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) -Entering state 11 -Reducing stack 0 by rule 13 
(line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) -Entering state 13 -Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) -Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) +0xff8a151f->Object::Object { 0x56ab33c4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33d4->Object::Object { 0x56ab33c4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518 } Entering state 1 +Stack now 0 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33d4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33d4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +0x56ab33d4->Object::Object { 0x56ab33c4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) -Entering state 4 -Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) -Entering state 13 -Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) -Entering state 22 -Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) -Entering state 29 -Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) -Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33e4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518 } Entering state 1 +Stack now 0 10 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33e4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33e4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 
0x56ab33e4, 0xff8a15d8 } +0x56ab33e4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) -Entering state 8 -Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) -Entering state 14 -Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) -Entering state 24 -Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) -Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33f4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a1518 } Entering state 1 +Stack now 0 10 10 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33f4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33f4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15d8 } +0x56ab33f4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) -Entering state 4 -Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) -Entering state 12 -Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) -Entering state 18 -Reading a token -Next token is token ')' (11.4: ) -Stack 0 Entering state 18 -Next token is token ')' (11.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (11.4: ) -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. 
-Stack 0 Entering state 8 -Reading a token -Next token is token '+' (11.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' (11.6: ) -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) -Returning to deterministic operation. -Entering state 15 -Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) -Entering state 8 +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'p' (0xff8a15c8 'p'Exception caught: cleaning lookahead and stack +0x56ab33f4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15c8 } +0x56ab33e4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15c8 } +0x56ab33d4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15c8 } +0x56ab33c4->Object::~Object { 0x56ab33c4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0xff8a15c8 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./cxx-type.at:423: $PREPARSER ./types test-input +stderr: +stderr: +syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) +0xff8a151f->Object::Object { } +0xff8a15c8->Object::Object { 0xff8a151f } +0xff8a151f->Object::~Object { 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33c4->Object::Object { 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0xff8a1518 } Entering state 1 -Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) -Entering state 4 -Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) -Entering state 12 -Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) -Entering state 18 -Reading a token -Next token is token ')' (13.4: ) -Stack 0 Entering state 18 -Next token is token ')' (13.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
-Stack 0 Entering state 20 -Next token is token ')' (13.4: ) -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' (13.5: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (15.0: ) -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) -Entering state 12 -Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) -Entering state 18 -Reading a token -Next token is token ')' (15.4: ) -Stack 0 Entering state 18 -Next token is token ')' (15.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (15.4: ) -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' (15.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. 
-Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) -Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID (15.8: ) -Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' (15.10: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) -Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID (15.12: ) -Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' (15.13: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (17.0: ) -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME (17.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) -Entering state 12 -Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) -Entering state 18 -Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) -Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) -Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) -Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) -Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) -Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) -Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) -Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) -Reading a token -Next token is token ';' (17.15: ) -Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Stack now 0 1 +0xff8a15d8->Object::Object { 0x56ab33c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33c4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33c4->Object::~Object { 0x56ab33c4, 0xff8a15d8 } +0x56ab33c4->Object::Object { 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0xff8a15d8 } Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) -Entering state 1 -Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) -Entering state 8 +Stack now 0 10 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) -Entering state 15 +0xff8a151f->Object::Object { 0x56ab33c4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0xff8a151f, 0xff8a15c8 } +Next 
token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33d4->Object::Object { 0x56ab33c4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518 } +Entering state 1 +Stack now 0 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33d4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33d4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +0x56ab33d4->Object::Object { 0x56ab33c4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) -Entering state 8 +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33e4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518 } +Entering state 1 +Stack now 0 10 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33e4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33e4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15d8 } +0x56ab33e4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'a' (0xff8a15c8 'a') +0xff8a1518->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518, 0xff8a15c8 } +Shifting token 'a' (0xff8a1518 'a') +0x56ab33f4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a1518 } +0xff8a1518->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a1518 } Entering state 1 +Stack now 0 10 10 10 1 +0xff8a15d8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56ab33f4 'a') +-> $$ = nterm item (0xff8a15d8 'a') +0x56ab33f4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15d8 } 
+0x56ab33f4->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15d8 } +0xff8a15d8->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15d8 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) -Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +0xff8a151f->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4 } +0xff8a15c8->Object::Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a151f } +0xff8a151f->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a151f, 0xff8a15c8 } +Next token is token 'p' (0xff8a15c8 'p'Exception caught: cleaning lookahead and stack +0x56ab33f4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0x56ab33f4, 0xff8a15c8 } +0x56ab33e4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0x56ab33e4, 0xff8a15c8 } +0x56ab33d4->Object::~Object { 0x56ab33c4, 0x56ab33d4, 0xff8a15c8 } +0x56ab33c4->Object::~Object { 0x56ab33c4, 0xff8a15c8 } +0xff8a15c8->Object::~Object { 0xff8a15c8 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr +./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +./cxx-type.at:423: $PREPARSER ./types -p test-input stderr: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token 
TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () Entering state 13 Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) +Next token is token ';' () +Shifting token ';' () Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () Entering state 13 Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) +Next token is token '=' () +Shifting token '=' () Entering state 22 Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 29 Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) +Next token is token ';' () +Shifting token ';' () Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token 
ID (9.0: ) --> $$ = nterm expr (9.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) +Next token is token '=' () +Shifting token '=' () Entering state 14 Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 24 Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (11.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. 
Stack 1 Entering state 13 -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) +On stack 0, shifting token '+' () Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () Returning to deterministic operation. Entering state 15 Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (13.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. 
Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' (13.5: ) +Next token is token ';' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) +On stack 1, shifting token ';' () Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (15.0: ) +Next token is token TYPENAME () Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (15.4: ) +Next token is token ')' () Splitting off stack 1 from 0. 
-Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '=' (15.6: ) +Next token is token '=' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) +Next token is token '=' () +On stack 0, shifting token '=' () Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) +On stack 1, shifting token '=' () Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID (15.8: ) +Next token is token ID () Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' (15.10: ) +Next token is token '+' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) +Next token is token '+' () +On stack 0, shifting token '+' () Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) +On stack 1, shifting token '+' () Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID (15.12: ) +Next token is token ID () Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. 
+Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' (15.13: ) +Next token is token ';' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) +On stack 1, shifting token ';' () Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (17.0: ) +Next token is token TYPENAME () Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME (17.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. 
Entering state 4 Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) +Next token is token ')' () +Error: discarding token ')' () Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) +Next token is token '=' () +Error: discarding token '=' () Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ';' (17.15: ) +Next token is token ';' () Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Next token is token ';' () +Shifting token ';' () Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) +Reducing stack 0 by rule 9 (line 
83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) +Next token is token '@' () +Shifting token '@' () Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -706. cxx-type.at:415: 708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... - ok -./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -stderr: -stdout: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./cxx-type.at:423: $PREPARSER ./types test-input -stderr: -syntax error -./c++.at:1363: $PREPARSER ./input aaaal +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () ./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: yylex -stderr: +exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./cxx-type.at:423: $PREPARSER ./types -p test-input -./c++.at:1363: $PREPARSER ./input i -stderr: stderr: Starting parse Entering state 0 @@ -258835,1075 +260394,1655 @@ Next token is token ID () Error: discarding token ID () Reading a token -Next token is token ';' () +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + 
$2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +707. cxx-type.at:420: ok +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./cxx-type.at:417: $PREPARSER ./types test-input +stderr: +17.5: syntax error +./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... +./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./cxx-type.at:417: $PREPARSER ./types -p test-input +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) +Entering state 1 +Reading a token +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) +Entering state 8 +Reading a token +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) +Entering state 15 +Reading a token +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) +Entering state 25 +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) +Entering state 8 +Reading a token +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) +Entering state 4 +Reading a token +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) +Entering state 11 +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) +Entering state 13 +Reading a token +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) +Entering state 23 +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) +Entering state 9 +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME 
(7.0: ) +Entering state 4 +Reading a token +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) +Entering state 11 +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) +Entering state 13 +Reading a token +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) +Entering state 22 +Reading a token +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) +Entering state 29 +Reading a token +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) +Entering state 30 +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) +Entering state 9 +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) +Entering state 1 +Reading a token +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) +Entering state 8 +Reading a token +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) +Entering state 14 +Reading a token +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) +Entering state 24 +Reading a token +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) +Entering state 8 +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) +Entering state 4 +Reading a token +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) +Entering state 12 +Reading a token +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) +Entering state 18 +Reading a token +Next token is token ')' (11.4: ) +Stack 0 Entering state 18 +Next token is token ')' (11.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' (11.4: ) +Stack 1 Entering state 21 +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (11.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' (11.6: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' (11.6: ) +Stack 1 dies. +Removing dead stacks. 
+On stack 0, shifting token '+' (11.6: ) +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) +Returning to deterministic operation. +Entering state 15 +Reading a token +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) +Entering state 25 +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) +Entering state 8 +Reading a token +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) +Entering state 4 +Reading a token +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) +Entering state 12 +Reading a token +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) +Entering state 18 +Reading a token +Next token is token ')' (13.4: ) +Stack 0 Entering state 18 +Next token is token ')' (13.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' (13.4: ) +Stack 1 Entering state 21 +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (13.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' (13.5: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) +Stack 0 now in state 16 +On stack 1, shifting token ';' (13.5: ) +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME (15.0: ) +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME (15.0: ) +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) +Entering state 12 +Reading a token +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) +Entering state 18 +Reading a token +Next token is token ')' (15.4: ) +Stack 0 Entering state 18 +Next token is token ')' (15.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' (15.4: ) +Stack 1 Entering state 21 +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (15.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' (15.6: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) +Stack 0 now in state 14 +On stack 1, shifting token '=' (15.6: ) +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID (15.8: ) +Stack 1 Entering state 22 +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) +Stack 0 now in state 5 +On stack 1, shifting token ID (15.8: ) +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' (15.10: ) +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) +Stack 0 now in state 15 +On stack 1, shifting token '+' (15.10: ) +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID (15.12: ) +Stack 1 Entering state 15 +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) +Stack 0 now in state 5 +On stack 1, shifting token ID (15.12: ) +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' (15.13: ) +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 90); action deferred. 
Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) +Stack 0 now in state 16 +On stack 1, shifting token ';' (15.13: ) +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME (17.0: ) +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME (17.0: ) +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) +Returning to deterministic operation. 
+Entering state 4 +Reading a token +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) +Entering state 12 +Reading a token +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) +Entering state 18 +Reading a token +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) +Entering state 20 +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) +Entering state 3 +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) +Reading a token +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) +Reading a token +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) +Reading a token +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) +Reading a token +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) +Reading a token +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) +Reading a token +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog (1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) 
Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) 
Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = 
nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. 
Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. 
+Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. 
+Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. 
Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog (1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr 
(19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +706. cxx-type.at:415: ok + stderr: -707. cxx-type.at:420: ok -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: $PREPARSER ./input i +stdout: +710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... 
+======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffff08ef->Object::Object { } -0xffff0998->Object::Object { 0xffff08ef } -0xffff08ef->Object::~Object { 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0xffff0998 } -0xffff088b->Object::Object { 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3c4->Object::Object { 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0xffff0900 } -Entering state 1 -Stack now 0 1 -0xffff09a8->Object::Object { 0x56c5f3c4 } +0xffd694df->Object::Object { } +0xffd69588->Object::Object { 0xffd694df } +0xffd694df->Object::~Object { 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0xffd69588 } +0xffd69588->Object::~Object { 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93c4->Object::Object { 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0xffd694d8 } +Entering state 2 +Stack now 0 2 +0xffd69598->Object::Object { 0x574c93c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3c4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3c4->Object::~Object { 0x56c5f3c4, 0xffff09a8 } -0x56c5f3c4->Object::Object { 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 + $1 = token 'a' (0x574c93c4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93c4->Object::~Object { 0x574c93c4, 0xffd69598 } +0x574c93c4->Object::Object { 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0xffd69598 } +Entering state 11 +Stack now 0 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3d4->Object::Object { 0x56c5f3c4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -Entering state 1 -Stack now 0 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4 } +0xffd694df->Object::Object { 0x574c93c4 } +0xffd69588->Object::Object { 0x574c93c4, 0xffd694df } +0xffd694df->Object::~Object 
{ 0x574c93c4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93d4->Object::Object { 0x574c93c4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3d4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3d4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0x56c5f3d4->Object::Object { 0x56c5f3c4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 + $1 = token 'a' (0x574c93d4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93d4->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +0x574c93d4->Object::Object { 0x574c93c4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3e4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -Entering state 1 -Stack now 0 10 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93e4->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3e4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3e4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0x56c5f3e4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 10 + $1 = token 'a' (0x574c93e4 'a') +-> $$ = nterm 
item (0xffd69598 'a') +0x574c93e4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +0x574c93e4->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3f4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0900 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93f4->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3f4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3f4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } -0x56c5f3f4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 10 10 + $1 = token 'a' (0x574c93f4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93f4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69598 } +0x574c93f4->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef, 0xffff0998 } -Next token is token 
'p' (0xffff0998 'p'Exception caught: cleaning lookahead and stack -0x56c5f3f4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0998 } -0x56c5f3e4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0998 } -0x56c5f3d4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0998 } -0x56c5f3c4->Object::~Object { 0x56c5f3c4, 0xffff0998 } -0xffff0998->Object::~Object { 0xffff0998 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694df, 0xffd69588 } +Next token is token 'p' (0xffd69588 'p'Exception caught: cleaning lookahead and stack +0x574c93f4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69588 } +0x574c93e4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69588 } +0x574c93d4->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69588 } +0x574c93c4->Object::~Object { 0x574c93c4, 0xffd69588 } +0xffd69588->Object::~Object { 0xffd69588 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: - -./c++.at:569: $here/modern -stdout: -709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... -stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffff08ef->Object::Object { } -0xffff0998->Object::Object { 0xffff08ef } -0xffff08ef->Object::~Object { 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0xffff0998 } -0xffff088b->Object::Object { 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3c4->Object::Object { 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0xffff0900 } -Entering state 1 -Stack now 0 1 -0xffff09a8->Object::Object { 0x56c5f3c4 } +0xffd694df->Object::Object { } +0xffd69588->Object::Object { 0xffd694df } +0xffd694df->Object::~Object { 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0xffd69588 } +0xffd69588->Object::~Object { 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93c4->Object::Object { 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0xffd694d8 } +Entering state 2 +Stack now 0 2 +0xffd69598->Object::Object { 0x574c93c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3c4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3c4->Object::~Object { 0x56c5f3c4, 0xffff09a8 } -0x56c5f3c4->Object::Object { 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 + $1 = token 'a' (0x574c93c4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93c4->Object::~Object { 0x574c93c4, 0xffd69598 } +0x574c93c4->Object::Object { 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0xffd69598 } +Entering state 11 +Stack now 0 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0xffff08ef } 
-0xffff08ef->Object::~Object { 0x56c5f3c4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3d4->Object::Object { 0x56c5f3c4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -Entering state 1 -Stack now 0 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4 } +0xffd694df->Object::Object { 0x574c93c4 } +0xffd69588->Object::Object { 0x574c93c4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93d4->Object::Object { 0x574c93c4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3d4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3d4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0x56c5f3d4->Object::Object { 0x56c5f3c4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 + $1 = token 'a' (0x574c93d4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93d4->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +0x574c93d4->Object::Object { 0x574c93c4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3e4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -Entering state 1 -Stack now 0 10 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0x574c93d4, 
0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93e4->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3e4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3e4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0x56c5f3e4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 10 + $1 = token 'a' (0x574c93e4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93e4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +0x574c93e4->Object::Object { 0x574c93c4, 0x574c93d4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff08ef, 0xffff0998 } -Next token is token 'a' (0xffff0998 'a') -0xffff0900->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0998 } -0xffff088b->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900, 0xffff0998 } -0xffff088b->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff088b, 0xffff0900, 0xffff0998 } -0xffff0998->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900, 0xffff0998 } -Shifting token 'a' (0xffff0900 'a') -0x56c5f3f4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0900 } -0xffff088f->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0900 } -0xffff088f->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff088f, 0xffff0900 } -0xffff0900->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0900 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffff09a8->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694df, 0xffd69588 } +Next token is token 'a' (0xffd69588 'a') +0xffd694d8->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69588 } +0xffd69588->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8, 0xffd69588 } +Shifting token 'a' (0xffd694d8 'a') +0x574c93f4->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd694d8 } +0xffd694d8->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694d8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xffd69598->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c5f3f4 'a') --> $$ = nterm item (0xffff09a8 'a') -0x56c5f3f4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } -0x56c5f3f4->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff09a8 } -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } 
-0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef, 0xffff09a8 } -0xffff09a8->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff09a8 } -Entering state 10 -Stack now 0 10 10 10 10 + $1 = token 'a' (0x574c93f4 'a') +-> $$ = nterm item (0xffd69598 'a') +0x574c93f4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69598 } +0x574c93f4->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69598 } +0xffd69598->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69598 } +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -0xffff08ef->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4 } -0xffff0998->Object::Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef } -0xffff08ef->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff08ef, 0xffff0998 } -Next token is token 'p' (0xffff0998 'p'Exception caught: cleaning lookahead and stack -0x56c5f3f4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0x56c5f3f4, 0xffff0998 } -0x56c5f3e4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0x56c5f3e4, 0xffff0998 } -0x56c5f3d4->Object::~Object { 0x56c5f3c4, 0x56c5f3d4, 0xffff0998 } -0x56c5f3c4->Object::~Object { 0x56c5f3c4, 0xffff0998 } -0xffff0998->Object::~Object { 0xffff0998 } +0xffd694df->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4 } +0xffd69588->Object::Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694df } +0xffd694df->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd694df, 0xffd69588 } +Next token is token 'p' (0xffd69588 'p'Exception caught: cleaning lookahead and stack +0x574c93f4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0x574c93f4, 0xffd69588 } +0x574c93e4->Object::~Object { 0x574c93c4, 0x574c93d4, 0x574c93e4, 0xffd69588 } +0x574c93d4->Object::~Object { 0x574c93c4, 0x574c93d4, 0xffd69588 } +0x574c93c4->Object::~Object { 0x574c93c4, 0xffd69588 } +0xffd69588->Object::~Object { 0xffd69588 } exception caught: printer end { } -Modern C++: 202002 -stdout: -./c++.at:1363: grep '^exception caught: printer$' stderr -./c++.at:569: $PREPARSER ./list -./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... 
+./c++.at:1362: grep '^exception caught: printer$' stderr stdout: exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./c++.at:1362: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1362: $PREPARSER ./input aaaaE stderr: -stdout: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: $PREPARSER ./input -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:1362: $PREPARSER ./input aaaaT stderr: stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr types.y:87.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./existing.at:808: $PREPARSER ./input +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i stderr: -stdout: -stdout: -./c++.at:851: $PREPARSER ./input -./c++.at:572: $here/modern +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -======== Testing with C++ standard flags: '' -Modern C++: 202002 -./c++.at:572: $PREPARSER ./list -./c++.at:851: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -621. existing.at:808: ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5703aa00->Object::Object { } +Next token is token 'a' (0x5703aa00 'a') +Shifting token 'a' (0x5703aa00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa00 'a') +-> $$ = nterm item (0x5703aa00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5703aa30->Object::Object { 0x5703aa00 } +Next token is token 'a' (0x5703aa30 'a') +Shifting token 'a' (0x5703aa30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa30 'a') +-> $$ = nterm item (0x5703aa30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x5703aa60->Object::Object { 0x5703aa00, 0x5703aa30 } +Next token is token 'a' (0x5703aa60 'a') +Shifting token 'a' (0x5703aa60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa60 'a') +-> $$ = nterm item (0x5703aa60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x5703aa90->Object::Object { 0x5703aa00, 0x5703aa30, 0x5703aa60 } +Next token is token 'a' (0x5703aa90 'a') +Shifting token 'a' (0x5703aa90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa90 'a') +-> $$ = nterm item (0x5703aa90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5703aac0->Object::Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90 } +Next token is token 'p' (0x5703aac0 'p'Exception caught: cleaning lookahead and stack +0x5703aac0->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90, 0x5703aac0 } +0x5703aa90->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90 } +0x5703aa60->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60 } +0x5703aa30->Object::~Object { 0x5703aa00, 0x5703aa30 } +0x5703aa00->Object::~Object { 0x5703aa00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5703aa00->Object::Object { } +Next token is token 'a' (0x5703aa00 'a') +Shifting token 'a' (0x5703aa00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa00 'a') +-> $$ = nterm item (0x5703aa00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x5703aa30->Object::Object { 0x5703aa00 } +Next token is token 'a' (0x5703aa30 'a') +Shifting token 'a' (0x5703aa30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa30 'a') +-> $$ = nterm item (0x5703aa30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token 
+0x5703aa60->Object::Object { 0x5703aa00, 0x5703aa30 } +Next token is token 'a' (0x5703aa60 'a') +Shifting token 'a' (0x5703aa60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa60 'a') +-> $$ = nterm item (0x5703aa60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x5703aa90->Object::Object { 0x5703aa00, 0x5703aa30, 0x5703aa60 } +Next token is token 'a' (0x5703aa90 'a') +Shifting token 'a' (0x5703aa90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5703aa90 'a') +-> $$ = nterm item (0x5703aa90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x5703aac0->Object::Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90 } +Next token is token 'p' (0x5703aac0 'p'Exception caught: cleaning lookahead and stack +0x5703aac0->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90, 0x5703aac0 } +0x5703aa90->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60, 0x5703aa90 } +0x5703aa60->Object::~Object { 0x5703aa00, 0x5703aa30, 0x5703aa60 } +0x5703aa30->Object::~Object { 0x5703aa00, 0x5703aa30 } +0x5703aa00->Object::~Object { 0x5703aa00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... -./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae stderr: -stdout: -./c++.at:1555: $PREPARSER ./test +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR stderr: -stdout: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:573: $here/modern stdout: -./c++.at:1555: ./check -stdout: -Modern C++: 202002 -./c++.at:573: $PREPARSER ./list -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:858: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 
-e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./cxx-type.at:429: $PREPARSER ./types test-input stderr: +stderr: 17.5: syntax error +stdout: +./c++.at:1361: $PREPARSER ./input aaaas ./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./cxx-type.at:429: $PREPARSER ./types -p test-input stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -261038,37 +263177,169 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 708. cxx-type.at:426: ok +./c++.at:1361: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... -./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: -stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5826aa00->Object::Object { } +Next token is token 'a' (0x5826aa00 'a') +Shifting token 'a' (0x5826aa00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa00 'a') +-> $$ = nterm item (0x5826aa00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5826aa30->Object::Object { 0x5826aa00 } +Next token is token 'a' (0x5826aa30 'a') +Shifting token 'a' (0x5826aa30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa30 'a') +-> $$ = nterm item (0x5826aa30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5826aa60->Object::Object { 0x5826aa00, 0x5826aa30 } +Next token is token 'a' (0x5826aa60 'a') +Shifting token 'a' (0x5826aa60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa60 'a') +-> $$ = nterm item (0x5826aa60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5826aa90->Object::Object { 0x5826aa00, 0x5826aa30, 0x5826aa60 } +Next token is token 'a' (0x5826aa90 'a') +Shifting token 'a' (0x5826aa90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa90 'a') +-> $$ = nterm item (0x5826aa90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x5826aac0->Object::Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 0x5826aa90 } +Next token is token 'p' (0x5826aac0 'p'Exception caught: cleaning lookahead and stack +0x5826aac0->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 
0x5826aa90, 0x5826aac0 } +0x5826aa90->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 0x5826aa90 } +0x5826aa60->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60 } +0x5826aa30->Object::~Object { 0x5826aa00, 0x5826aa30 } +0x5826aa00->Object::~Object { 0x5826aa00 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./cxx-type.at:435: $PREPARSER ./types test-input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5826aa00->Object::Object { } +Next token is token 'a' (0x5826aa00 'a') +Shifting token 'a' (0x5826aa00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa00 'a') +-> $$ = nterm item (0x5826aa00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5826aa30->Object::Object { 0x5826aa00 } +Next token is token 'a' (0x5826aa30 'a') +Shifting token 'a' (0x5826aa30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa30 'a') +-> $$ = nterm item (0x5826aa30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5826aa60->Object::Object { 0x5826aa00, 0x5826aa30 } +Next token is token 'a' (0x5826aa60 'a') +Shifting token 'a' (0x5826aa60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa60 'a') +-> $$ = nterm item (0x5826aa60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5826aa90->Object::Object { 0x5826aa00, 0x5826aa30, 0x5826aa60 } +Next token is token 'a' (0x5826aa90 'a') +Shifting token 'a' (0x5826aa90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5826aa90 'a') +-> $$ = nterm item (0x5826aa90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x5826aac0->Object::Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 0x5826aa90 } +Next token is token 'p' (0x5826aac0 'p'Exception caught: cleaning lookahead and stack +0x5826aac0->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 0x5826aa90, 0x5826aac0 } +0x5826aa90->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60, 0x5826aa90 } +0x5826aa60->Object::~Object { 0x5826aa00, 0x5826aa30, 0x5826aa60 } +0x5826aa30->Object::~Object { 0x5826aa00, 0x5826aa30 } +0x5826aa00->Object::~Object { 0x5826aa00 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr stdout: +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae stderr: -syntax error -./c++.at:1362: $PREPARSER ./input aaaas +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... 
+./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +stdout: +stderr: +./cxx-type.at:435: $PREPARSER ./types test-input +types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1362: $PREPARSER ./input aaaal -stderr: stderr: +./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +syntax error +./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:435: $PREPARSER ./types -p test-input -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: stderr: -./c++.at:1360: $PREPARSER ./input aaaas Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -261677,14 +263948,8 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -stderr: -./c++.at:1362: $PREPARSER ./input i -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: initial-action Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -262293,502 +264558,26 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 709. 
cxx-type.at:432: ok -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: $PREPARSER ./input i -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: initial-action -./c++.at:1362: $PREPARSER ./input --debug aaaap -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff84970f->Object::Object { } -0xff8497b8->Object::Object { 0xff84970f } -0xff84970f->Object::~Object { 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0xff8497b8 } -0xff8497b8->Object::~Object { 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53c4->Object::Object { 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0xff849708 } -Entering state 2 -Stack now 0 2 -0xff8497c8->Object::Object { 0x57df53c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53c4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53c4->Object::~Object { 0x57df53c4, 0xff8497c8 } -0x57df53c4->Object::Object { 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4 } -0xff8497b8->Object::Object { 0x57df53c4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53d4->Object::Object { 0x57df53c4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff849708 } -Entering state 2 -Stack now 0 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53d4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53d4->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff8497c8 } -0x57df53d4->Object::Object { 0x57df53c4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0x57df53d4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53e4->Object::Object { 0x57df53c4, 0x57df53d4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708 } -Entering state 2 -Stack now 0 11 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53e4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53e4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -0x57df53e4->Object::Object { 0x57df53c4, 0x57df53d4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff84970f } 
-0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53f4->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff849708 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53f4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53f4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497c8 } -0x57df53f4->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff84970f, 0xff8497b8 } -Next token is token 'p' (0xff8497b8 'p'Exception caught: cleaning lookahead and stack -0x57df53f4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497b8 } -0x57df53e4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497b8 } -0x57df53d4->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff8497b8 } -0x57df53c4->Object::~Object { 0x57df53c4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0xff8497b8 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff84970f->Object::Object { } -0xff8497b8->Object::Object { 0xff84970f } -0xff84970f->Object::~Object { 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0xff8497b8 } -0xff8497b8->Object::~Object { 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53c4->Object::Object { 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0xff849708 } -Entering state 2 -Stack now 0 2 -0xff8497c8->Object::Object { 0x57df53c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53c4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53c4->Object::~Object { 0x57df53c4, 0xff8497c8 } -0x57df53c4->Object::Object { 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4 } -0xff8497b8->Object::Object { 0x57df53c4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53d4->Object::Object { 0x57df53c4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff849708 } -Entering state 2 -Stack now 0 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53d4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53d4->Object::~Object { 0x57df53c4, 0x57df53d4, 
0xff8497c8 } -0x57df53d4->Object::Object { 0x57df53c4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0x57df53d4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53e4->Object::Object { 0x57df53c4, 0x57df53d4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708 } -Entering state 2 -Stack now 0 11 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53e4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53e4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -0x57df53e4->Object::Object { 0x57df53c4, 0x57df53d4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff84970f, 0xff8497b8 } -Next token is token 'a' (0xff8497b8 'a') -0xff849708->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708, 0xff8497b8 } -Shifting token 'a' (0xff849708 'a') -0x57df53f4->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff849708 } -0xff849708->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff849708 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff8497c8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57df53f4 'a') --> $$ = nterm item (0xff8497c8 'a') -0x57df53f4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497c8 } -0x57df53f4->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497c8 } -0xff8497c8->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497c8 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff84970f->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4 } -0xff8497b8->Object::Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff84970f } -0xff84970f->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff84970f, 0xff8497b8 } -Next token is token 'p' (0xff8497b8 'p'Exception caught: cleaning lookahead and stack -0x57df53f4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0x57df53f4, 0xff8497b8 } -0x57df53e4->Object::~Object { 0x57df53c4, 0x57df53d4, 0x57df53e4, 0xff8497b8 } -0x57df53d4->Object::~Object { 0x57df53c4, 0x57df53d4, 0xff8497b8 } -0x57df53c4->Object::~Object { 0x57df53c4, 0xff8497b8 } -0xff8497b8->Object::~Object { 0xff8497b8 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -stderr: -exception caught: syntax error -./c++.at:1362: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57eb1a00->Object::Object { } -Next token is token 'a' (0x57eb1a00 'a') -Shifting token 'a' (0x57eb1a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a00 'a') --> $$ = nterm item (0x57eb1a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x57eb1a30->Object::Object { 0x57eb1a00 } -Next token is token 'a' (0x57eb1a30 'a') -Shifting token 'a' (0x57eb1a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a30 'a') --> $$ = nterm item (0x57eb1a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x57eb1a60->Object::Object { 0x57eb1a00, 0x57eb1a30 } -Next token is token 'a' (0x57eb1a60 'a') -Shifting token 'a' (0x57eb1a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a60 'a') --> $$ = nterm item (0x57eb1a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x57eb1a90->Object::Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60 } -Next token is token 'a' (0x57eb1a90 'a') -Shifting token 'a' (0x57eb1a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a90 'a') --> $$ = nterm item (0x57eb1a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x57eb1ac0->Object::Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90 } -Next token is token 'p' (0x57eb1ac0 'p'Exception caught: cleaning lookahead and stack -0x57eb1ac0->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90, 0x57eb1ac0 } -0x57eb1a90->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90 } -0x57eb1a60->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60 } -0x57eb1a30->Object::~Object { 0x57eb1a00, 0x57eb1a30 } -0x57eb1a00->Object::~Object { 0x57eb1a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaaE -./c++.at:566: $here/modern -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57eb1a00->Object::Object { } -Next token is token 'a' (0x57eb1a00 'a') -Shifting token 'a' (0x57eb1a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a00 'a') --> $$ = nterm item (0x57eb1a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x57eb1a30->Object::Object { 0x57eb1a00 } -Next token is token 'a' (0x57eb1a30 'a') -Shifting token 'a' (0x57eb1a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a30 'a') --> $$ = nterm item (0x57eb1a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x57eb1a60->Object::Object { 0x57eb1a00, 0x57eb1a30 } -Next token is token 'a' (0x57eb1a60 'a') -Shifting token 'a' (0x57eb1a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a60 'a') --> $$ = nterm item (0x57eb1a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x57eb1a90->Object::Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60 } -Next token is token 'a' (0x57eb1a90 'a') -Shifting token 'a' (0x57eb1a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57eb1a90 'a') --> $$ = nterm item (0x57eb1a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a 
token -0x57eb1ac0->Object::Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90 } -Next token is token 'p' (0x57eb1ac0 'p'Exception caught: cleaning lookahead and stack -0x57eb1ac0->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90, 0x57eb1ac0 } -0x57eb1a90->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60, 0x57eb1a90 } -0x57eb1a60->Object::~Object { 0x57eb1a00, 0x57eb1a30, 0x57eb1a60 } -0x57eb1a30->Object::~Object { 0x57eb1a00, 0x57eb1a30 } -0x57eb1a00->Object::~Object { 0x57eb1a00 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -stderr: -stderr: -stdout: -713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... -exception caught: syntax error, unexpected end of file, expecting 'a' -Modern C++: 202302 -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:566: $PREPARSER ./list -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -exception caught: reduction -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stderr: -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -./c++.at:1360: $PREPARSER ./input aaaaE -./c++.at:1362: $PREPARSER ./input aaaaR -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... +./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +685. 
c++.at:1360: ok stderr: -======== Testing with C++ standard flags: '' stdout: -stderr: -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./cxx-type.at:441: $PREPARSER ./types test-input -./c++.at:1360: $PREPARSER ./input aaaaT -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -stderr: -./c++.at:1361: $PREPARSER ./input aaaap -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples 17.5: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS ./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1361: $PREPARSER ./input --debug aaaap -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:441: $PREPARSER ./types -p test-input stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -0x57346a00->Object::Object { } -Next token is token 'a' (0x57346a00 'a') -Shifting token 'a' (0x57346a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a00 'a') --> $$ = nterm item (0x57346a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57346a30->Object::Object { 0x57346a00 } -Next token is token 'a' (0x57346a30 'a') -Shifting token 'a' (0x57346a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a30 'a') --> $$ = nterm item (0x57346a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57346a60->Object::Object { 0x57346a00, 0x57346a30 } -Next token is token 'a' (0x57346a60 'a') -Shifting token 'a' (0x57346a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a60 'a') --> $$ = nterm item (0x57346a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x57346a90->Object::Object { 0x57346a00, 0x57346a30, 0x57346a60 } -Next token is token 'a' (0x57346a90 'a') -Shifting token 'a' (0x57346a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a90 'a') --> $$ = nterm item (0x57346a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x57346ac0->Object::Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90 } -Next token is token 'p' (0x57346ac0 'p'Exception caught: cleaning lookahead and stack -0x57346ac0->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90, 0x57346ac0 } -0x57346a90->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90 } -0x57346a60->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60 } -0x57346a30->Object::~Object { 0x57346a00, 0x57346a30 } -0x57346a00->Object::~Object { 0x57346a00 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 Reducing stack 0 by rule 1 (line 71): -> $$ = nterm prog (1.1: ) Entering state 1 @@ -263395,68 +265184,9 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -stderr: ./cxx-type.at:441: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57346a00->Object::Object { } -Next token is token 'a' (0x57346a00 'a') -Shifting token 'a' (0x57346a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a00 'a') --> $$ = nterm item (0x57346a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57346a30->Object::Object { 0x57346a00 } -Next token is token 'a' (0x57346a30 'a') -Shifting token 'a' (0x57346a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a30 'a') --> $$ = nterm item (0x57346a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x57346a60->Object::Object { 0x57346a00, 0x57346a30 } -Next token is token 'a' (0x57346a60 'a') -Shifting token 'a' (0x57346a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a60 'a') --> $$ = nterm item (0x57346a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x57346a90->Object::Object { 0x57346a00, 0x57346a30, 0x57346a60 } -Next token is token 'a' (0x57346a90 'a') -Shifting token 'a' (0x57346a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57346a90 'a') --> $$ = nterm item (0x57346a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x57346ac0->Object::Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90 } -Next token is token 'p' (0x57346ac0 'p'Exception caught: cleaning lookahead and stack -0x57346ac0->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90, 0x57346ac0 } -0x57346a90->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60, 0x57346a90 } -0x57346a60->Object::~Object { 0x57346a00, 0x57346a30, 0x57346a60 } -0x57346a30->Object::~Object { 0x57346a00, 0x57346a30 } -0x57346a00->Object::~Object { 0x57346a00 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr stderr: -stdout: +686. c++.at:1361: ok Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -264066,86 +265796,37 @@ $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) 710. cxx-type.at:438: ok -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -666. c++.at:566: ok -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -stdout: -./c++.at:857: $PREPARSER ./input -./c++.at:1361: $PREPARSER ./input aaaaT -./c++.at:851: $PREPARSER ./input -stderr: -stderr: -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -stderr: -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:574: $here/modern + +713. 
cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... +./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y 714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... ./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y -stdout: -Modern C++: 202002 -./c++.at:574: $PREPARSER ./list -685. c++.at:1360: ok -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... ./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS ./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS - +./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./cxx-type.at:447: $PREPARSER ./types test-input stderr: syntax error -./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS ./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:447: $PREPARSER ./types -p test-input stderr: -stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -264754,9 +266435,8 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () +stderr: stdout: -716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... -./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y ./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -265368,52 +267048,19 @@ $1 = token '@' () Cleanup: popping nterm prog () 711. cxx-type.at:444: ok -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -686. c++.at:1361: ok -./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +689. c++.at:1371: ok +716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... 
+./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -stderr: -stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS 717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... ./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -689. c++.at:1371: ok -./c++.at:1066: $PREPARSER ./input < in -718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... -./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS - -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... -./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -stderr: -stdout: -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -stderr: stdout: ./cxx-type.at:452: $PREPARSER ./types test-input stderr: +./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS 17.5: syntax error ./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:452: $PREPARSER ./types -p test-input @@ -266640,38 +268287,262 @@ stderr: stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:1555: $PREPARSER ./test +./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... 
-./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaal +./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +714. glr-regression.at:205: ok +./c++.at:1363: $PREPARSER ./input i stderr: -stdout: -./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1363: $PREPARSER ./input aaaap stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffce032f->Object::Object { } +0xffce03d8->Object::Object { 0xffce032f } +0xffce032f->Object::~Object { 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0xffce03d8 } +0xffce03d8->Object::~Object { 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453c4->Object::Object { 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0xffce0328 } +Entering state 1 +Stack now 0 1 +0xffce03e8->Object::Object { 0x57f453c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453c4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453c4->Object::~Object { 0x57f453c4, 0xffce03e8 } +0x57f453c4->Object::Object { 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4 } +0xffce03d8->Object::Object { 0x57f453c4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0x57f453c4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453d4->Object::Object { 0x57f453c4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce0328 } +Entering state 1 +Stack now 0 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453d4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453d4->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +0x57f453d4->Object::Object { 0x57f453c4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453e4->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328 } +Entering state 1 +Stack now 0 10 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4 } +Reducing stack by 
rule 4 (line 142): + $1 = token 'a' (0x57f453e4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453e4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +0x57f453e4->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453f4->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce0328 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453f4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453f4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03e8 } +0x57f453f4->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce032f, 0xffce03d8 } +Next token is token 'p' (0xffce03d8 'p'Exception caught: cleaning lookahead and stack +0x57f453f4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03d8 } +0x57f453e4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03d8 } +0x57f453d4->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce03d8 } +0x57f453c4->Object::~Object { 0x57f453c4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0xffce03d8 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffce032f->Object::Object { } +0xffce03d8->Object::Object { 0xffce032f } +0xffce032f->Object::~Object { 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0xffce03d8 } +0xffce03d8->Object::~Object { 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453c4->Object::Object { 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0xffce0328 } +Entering state 1 +Stack now 0 1 +0xffce03e8->Object::Object { 0x57f453c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453c4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453c4->Object::~Object { 0x57f453c4, 0xffce03e8 } +0x57f453c4->Object::Object { 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4 } +0xffce03d8->Object::Object { 0x57f453c4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0xffce03d8 } 
+0xffce03d8->Object::~Object { 0x57f453c4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453d4->Object::Object { 0x57f453c4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce0328 } +Entering state 1 +Stack now 0 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453d4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453d4->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +0x57f453d4->Object::Object { 0x57f453c4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0x57f453c4, 0x57f453d4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453e4->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328 } +Entering state 1 +Stack now 0 10 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453e4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453e4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +0x57f453e4->Object::Object { 0x57f453c4, 0x57f453d4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce032f, 0xffce03d8 } +Next token is token 'a' (0xffce03d8 'a') +0xffce0328->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328, 0xffce03d8 } +Shifting token 'a' (0xffce0328 'a') +0x57f453f4->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce0328 } +0xffce0328->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce0328 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffce03e8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f453f4 'a') +-> $$ = nterm item (0xffce03e8 'a') +0x57f453f4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03e8 } +0x57f453f4->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03e8 } +0xffce03e8->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03e8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffce032f->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4 } +0xffce03d8->Object::Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce032f } +0xffce032f->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce032f, 0xffce03d8 } +Next token is token 'p' (0xffce03d8 'p'Exception caught: cleaning lookahead and stack +0x57f453f4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0x57f453f4, 0xffce03d8 } +0x57f453e4->Object::~Object { 0x57f453c4, 0x57f453d4, 0x57f453e4, 0xffce03d8 } +0x57f453d4->Object::~Object { 0x57f453c4, 0x57f453d4, 
0xffce03d8 } +0x57f453c4->Object::~Object { 0x57f453c4, 0xffce03d8 } +0xffce03d8->Object::~Object { 0xffce03d8 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... +./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... +./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y ./cxx-type.at:458: $PREPARSER ./types test-input -./c++.at:857: $PREPARSER ./input -stderr: stderr: +./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS syntax error, unexpected ID, expecting '=' or '+' or ')' -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./cxx-type.at:458: $PREPARSER ./types -p test-input +./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS stderr: Starting parse Entering state 0 @@ -267892,656 +269763,278 @@ $1 = token '@' () Cleanup: popping nterm prog () 713. cxx-type.at:455: ok -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./c++.at:851: $PREPARSER ./input +./c++.at:1555: $PREPARSER ./test stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +691. c++.at:1517: ok + +720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... +./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y +./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS 721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... 
./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -stderr: -stdout: -./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB -stderr: -./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -714. glr-regression.at:205: ok ./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS - -722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... -./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS -stderr: -stdout: -./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -717. glr-regression.at:354: ok -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal - -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffed188f->Object::Object { } -0xffed1948->Object::Object { 0xffed188f } -0xffed188f->Object::~Object { 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0xffed1948 } -0xffed1948->Object::~Object { 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233c4->Object::Object { 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0xffed1888 } -Entering state 1 -Stack now 0 1 -0xffed1958->Object::Object { 0x56e233c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233c4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233c4->Object::~Object { 0x56e233c4, 0xffed1958 } -0x56e233c4->Object::Object { 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0xffed1958 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4 } -0xffed1948->Object::Object { 0x56e233c4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0xffed1948 } -0xffed1948->Object::~Object { 0x56e233c4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233d4->Object::Object { 0x56e233c4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1888 } -Entering state 1 -Stack now 0 10 1 
-0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233d4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233d4->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -0x56e233d4->Object::Object { 0x56e233c4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1948 } -0xffed1948->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233e4->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888 } -Entering state 1 -Stack now 0 10 10 1 -0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233e4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233e4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -0x56e233e4->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1948 } -0xffed1948->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233f4->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1888 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233f4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233f4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1958 } -0x56e233f4->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed188f, 0xffed1948 } -Next token is token 'p' (0xffed1948 'p'Exception caught: cleaning lookahead and stack -0x56e233f4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1948 } -0x56e233e4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1948 } -0x56e233d4->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1948 } -0x56e233c4->Object::~Object { 0x56e233c4, 0xffed1948 } -0xffed1948->Object::~Object { 0xffed1948 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -723. 
glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... -./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffed188f->Object::Object { } -0xffed1948->Object::Object { 0xffed188f } -0xffed188f->Object::~Object { 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0xffed1948 } -0xffed1948->Object::~Object { 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233c4->Object::Object { 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0xffed1888 } -Entering state 1 -Stack now 0 1 -0xffed1958->Object::Object { 0x56e233c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233c4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233c4->Object::~Object { 0x56e233c4, 0xffed1958 } -0x56e233c4->Object::Object { 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0xffed1958 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4 } -0xffed1948->Object::Object { 0x56e233c4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0xffed1948 } -0xffed1948->Object::~Object { 0x56e233c4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233d4->Object::Object { 0x56e233c4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1888 } -Entering state 1 -Stack now 0 10 1 -0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233d4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233d4->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -0x56e233d4->Object::Object { 0x56e233c4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1948 } -0xffed1948->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233e4->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888 } -Entering state 1 -Stack now 0 10 10 1 -0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233e4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233e4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -0x56e233e4->Object::Object { 0x56e233c4, 0x56e233d4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed188f, 0xffed1948 } -Next token is token 'a' (0xffed1948 'a') -0xffed1888->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1948 } -0xffed1948->Object::~Object { 
0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888, 0xffed1948 } -Shifting token 'a' (0xffed1888 'a') -0x56e233f4->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1888 } -0xffed1888->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1888 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffed1958->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e233f4 'a') --> $$ = nterm item (0xffed1958 'a') -0x56e233f4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1958 } -0x56e233f4->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1958 } -0xffed1958->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1958 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffed188f->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4 } -0xffed1948->Object::Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed188f } -0xffed188f->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed188f, 0xffed1948 } -Next token is token 'p' (0xffed1948 'p'Exception caught: cleaning lookahead and stack -0x56e233f4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0x56e233f4, 0xffed1948 } -0x56e233e4->Object::~Object { 0x56e233c4, 0x56e233d4, 0x56e233e4, 0xffed1948 } -0x56e233d4->Object::~Object { 0x56e233c4, 0x56e233d4, 0xffed1948 } -0x56e233c4->Object::~Object { 0x56e233c4, 0xffed1948 } -0xffed1948->Object::~Object { 0xffed1948 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB stderr: ./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 715. glr-regression.at:206: ok -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... -./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -stderr: +722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... 
+./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y stderr: stdout: -stdout: -./c++.at:851: $PREPARSER ./input -./c++.at:857: $PREPARSER ./input -stderr: +./c++.at:858: $PREPARSER ./input stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt -stderr: -./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -720. glr-regression.at:488: ok - -stderr: -stdout: -./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt -stderr: -725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... -./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt -stderr: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt -stderr: -./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -718. 
glr-regression.at:355: ok +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas stderr: exception caught: reduction - ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaal stderr: -stderr: exception caught: yylex -stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stdout: -./c++.at:568: $here/modern -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -Modern C++: 202302 ./c++.at:1362: $PREPARSER ./input i -./c++.at:568: $PREPARSER ./list +stderr: stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt stderr: exception caught: initial-action -stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaap +./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./c++.at:1362: $PREPARSER ./input --debug aaaap -stdout: -stderr: +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt Starting parse Entering state 0 Stack now 0 Reading a token -0xff8ef40f->Object::Object { } -0xff8ef4b8->Object::Object { 0xff8ef40f } -0xff8ef40f->Object::~Object { 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3c4->Object::Object { 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0xff8ef408 } +0xffb66f3f->Object::Object { } +0xffb66fe8->Object::Object { 0xffb66f3f } +0xffb66f3f->Object::~Object { 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3c4->Object::Object { 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0xffb66f38 } Entering state 2 Stack now 0 2 -0xff8ef4c8->Object::Object { 0x5749e3c4 } +0xffb66ff8->Object::Object { 0x57cdc3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3c4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3c4->Object::~Object { 0x5749e3c4, 0xff8ef4c8 } -0x5749e3c4->Object::Object { 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3c4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3c4->Object::~Object { 0x57cdc3c4, 0xffb66ff8 } +0x57cdc3c4->Object::Object { 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0xffb66ff8 } Entering state 11 Stack now 0 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4 } 
-0xff8ef4b8->Object::Object { 0x5749e3c4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3d4->Object::Object { 0x5749e3c4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3d4->Object::Object { 0x57cdc3c4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38 } Entering state 2 Stack now 0 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3d4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3d4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } -0x5749e3d4->Object::Object { 0x5749e3c4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3d4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3d4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66ff8 } +0x57cdc3d4->Object::Object { 0x57cdc3c4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3e4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3e4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38 } Entering state 2 Stack now 0 11 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3e4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3e4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } -0x5749e3e4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3e4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3e4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } +0x57cdc3e4->Object::Object { 0x57cdc3c4, 
0x57cdc3d4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3f4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3f4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f38 } Entering state 2 Stack now 0 11 11 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3f4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3f4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4c8 } -0x5749e3f4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3f4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3f4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66ff8 } +0x57cdc3f4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'p' (0xff8ef4b8 'p'Exception caught: cleaning lookahead and stack -0x5749e3f4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4b8 } -0x5749e3e4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4b8 } -0x5749e3d4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4b8 } -0x5749e3c4->Object::~Object { 0x5749e3c4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0xff8ef4b8 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'p' (0xffb66fe8 'p'Exception caught: cleaning lookahead and stack +0x57cdc3f4->Object::~Object { 0x57cdc3c4, 
0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66fe8 } +0x57cdc3e4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66fe8 } +0x57cdc3d4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66fe8 } +0x57cdc3c4->Object::~Object { 0x57cdc3c4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0xffb66fe8 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:570: $here/modern -./c++.at:567: $here/modern -stdout: -Modern C++: 202302 -./c++.at:570: $PREPARSER ./list -726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... -./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y stderr: -stdout: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff8ef40f->Object::Object { } -0xff8ef4b8->Object::Object { 0xff8ef40f } -0xff8ef40f->Object::~Object { 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3c4->Object::Object { 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0xff8ef408 } +0xffb66f3f->Object::Object { } +0xffb66fe8->Object::Object { 0xffb66f3f } +0xffb66f3f->Object::~Object { 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3c4->Object::Object { 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0xffb66f38 } Entering state 2 Stack now 0 2 -0xff8ef4c8->Object::Object { 0x5749e3c4 } +0xffb66ff8->Object::Object { 0x57cdc3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3c4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3c4->Object::~Object { 0x5749e3c4, 0xff8ef4c8 } -0x5749e3c4->Object::Object { 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3c4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3c4->Object::~Object { 0x57cdc3c4, 0xffb66ff8 } +0x57cdc3c4->Object::Object { 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0xffb66ff8 } Entering state 11 Stack now 0 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3d4->Object::Object { 0x5749e3c4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3d4->Object::Object { 0x57cdc3c4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38 } Entering state 2 Stack now 0 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' 
(0x5749e3d4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3d4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } -0x5749e3d4->Object::Object { 0x5749e3c4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3d4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3d4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66ff8 } +0x57cdc3d4->Object::Object { 0x57cdc3c4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3e4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3e4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38 } Entering state 2 Stack now 0 11 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3e4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3e4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } -0x5749e3e4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3e4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3e4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } +0x57cdc3e4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'a' (0xff8ef4b8 'a') -0xff8ef408->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408, 0xff8ef4b8 } -Shifting token 'a' (0xff8ef408 'a') -0x5749e3f4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef408 } -0xff8ef408->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef408 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'a' (0xffb66fe8 'a') +0xffb66f38->Object::Object { 0x57cdc3c4, 
0x57cdc3d4, 0x57cdc3e4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38, 0xffb66fe8 } +Shifting token 'a' (0xffb66f38 'a') +0x57cdc3f4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66f38 } +0xffb66f38->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f38 } Entering state 2 Stack now 0 11 11 11 2 -0xff8ef4c8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4 } +0xffb66ff8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5749e3f4 'a') --> $$ = nterm item (0xff8ef4c8 'a') -0x5749e3f4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4c8 } -0x5749e3f4->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4c8 } -0xff8ef4c8->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4c8 } + $1 = token 'a' (0x57cdc3f4 'a') +-> $$ = nterm item (0xffb66ff8 'a') +0x57cdc3f4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66ff8 } +0x57cdc3f4->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66ff8 } +0xffb66ff8->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66ff8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xff8ef40f->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4 } -0xff8ef4b8->Object::Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef40f } -0xff8ef40f->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef40f, 0xff8ef4b8 } -Next token is token 'p' (0xff8ef4b8 'p'Exception caught: cleaning lookahead and stack -0x5749e3f4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0x5749e3f4, 0xff8ef4b8 } -0x5749e3e4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0x5749e3e4, 0xff8ef4b8 } -0x5749e3d4->Object::~Object { 0x5749e3c4, 0x5749e3d4, 0xff8ef4b8 } -0x5749e3c4->Object::~Object { 0x5749e3c4, 0xff8ef4b8 } -0xff8ef4b8->Object::~Object { 0xff8ef4b8 } +0xffb66f3f->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4 } +0xffb66fe8->Object::Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f3f } +0xffb66f3f->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66f3f, 0xffb66fe8 } +Next token is token 'p' (0xffb66fe8 'p'Exception caught: cleaning lookahead and stack +0x57cdc3f4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0x57cdc3f4, 0xffb66fe8 } +0x57cdc3e4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0x57cdc3e4, 0xffb66fe8 } +0x57cdc3d4->Object::~Object { 0x57cdc3c4, 0x57cdc3d4, 0xffb66fe8 } +0x57cdc3c4->Object::~Object { 0x57cdc3c4, 0xffb66fe8 } +0xffb66fe8->Object::~Object { 0xffb66fe8 } exception caught: printer end { } -Modern C++: 202302 ./c++.at:1362: grep '^exception caught: printer$' stderr -stderr: -./c++.at:567: $PREPARSER ./list +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -stderr: exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" 
-Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) +717. glr-regression.at:354: ok stderr: exception caught: syntax error -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaE -./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS stderr: + exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaT @@ -268550,482 +270043,293 @@ ./c++.at:1362: $PREPARSER ./input aaaaR stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -668. c++.at:568: ok - -670. c++.at:570: ok -667. c++.at:567: ok - -727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... -stderr: +723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... +./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS stderr: stdout: -./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -stdout: -./c++.at:1555: $PREPARSER ./test +./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt +stderr: +./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +720. glr-regression.at:488: ok -./c++.at:571: $here/modern +724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... +./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +687. c++.at:1362: ok +./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS + +725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... +./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y stderr: stdout: -Modern C++: 202302 -./c++.at:571: $PREPARSER ./list -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -728. 
glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt stderr: -./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -stdout: +./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt +stderr: +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +718. glr-regression.at:355: ok + +726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... +./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS stderr: stdout: ./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt -./glr-regression.at:592: $PREPARSER ./glr-regr4 -stderr: stderr: -./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -723. glr-regression.at:592: ok -stderr: -stdout: -729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... -./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y -./c++.at:851: $PREPARSER ./input 721. glr-regression.at:489: ok -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -======== Testing with C++ standard flags: '' - -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS - -stderr: -671. c++.at:571: ok -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y - -730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... -./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y -stderr: -stdout: -731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... 
-./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y -./c++.at:857: $PREPARSER ./input -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS -./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS -732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... -./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y -./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS -stderr: -stdout: -./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB -stderr: -./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -716. glr-regression.at:207: ok -733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... -./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y stderr: stdout: -./glr-regression.at:670: $PREPARSER ./glr-regr5 +./glr-regression.at:592: $PREPARSER ./glr-regr4 stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' +./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +723. glr-regression.at:592: ok +727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... +./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -syntax is ambiguous -./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -729. glr-regression.at:670: ok -./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... +./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: stdout: ./glr-regression.at:596: $PREPARSER ./glr-regr4 stderr: - ./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 726. glr-regression.at:596: ok -stderr: -stdout: - -stderr: -./c++.at:572: $here/modern -stdout: -734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... 
-stdout: -./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y -Modern C++: 202302 -./c++.at:572: $PREPARSER ./list -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./glr-regression.at:593: $PREPARSER ./glr-regr4 -stderr: -./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... -./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y -724. glr-regression.at:593: ok -stderr: -stdout: -./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS -./c++.at:569: $here/modern -stdout: -Modern C++: 202302 -stderr: - -./c++.at:569: $PREPARSER ./list -stdout: -./glr-regression.at:738: $PREPARSER ./glr-regr6 -stderr: -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -732. glr-regression.at:738: ok -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -672. c++.at:572: ok -736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... -./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... -./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -669. c++.at:569: ok stderr: stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +./c++.at:858: $PREPARSER ./input stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +729. 
glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... +./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y stderr: stdout: -stdout: -./c++.at:857: $PREPARSER ./input ./c++.at:1363: $PREPARSER ./input aaaas stderr: -stderr: exception caught: reduction -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... -./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -======== Testing with C++ standard flags: '' +./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS ./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS ./c++.at:1363: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... -./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y ./c++.at:1363: $PREPARSER ./input aaaap -stderr: +./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff9820bf->Object::Object { } -0xff982168->Object::Object { 0xff9820bf } -0xff9820bf->Object::~Object { 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0xff982168 } -0xff982168->Object::~Object { 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693c4->Object::Object { 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0xff9820b8 } +0xffd2833f->Object::Object { } +0xffd283e8->Object::Object { 0xffd2833f } +0xffd2833f->Object::~Object { 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0xffd283e8 } +0xffd283e8->Object::~Object { 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13c4->Object::Object { 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0xffd28338 } Entering state 1 Stack now 0 1 -0xff982178->Object::Object { 0x56c693c4 } +0xffd283f8->Object::Object { 0x567a13c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693c4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693c4->Object::~Object { 0x56c693c4, 0xff982178 } -0x56c693c4->Object::Object { 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0xff982178 } + $1 = token 'a' (0x567a13c4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13c4->Object::~Object { 0x567a13c4, 0xffd283f8 } 
+0x567a13c4->Object::Object { 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0xffd283f8 } Entering state 10 Stack now 0 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4 } -0xff982168->Object::Object { 0x56c693c4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693d4->Object::Object { 0x56c693c4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4 } +0xffd283e8->Object::Object { 0x567a13c4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13d4->Object::Object { 0x567a13c4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd28338 } Entering state 1 Stack now 0 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693d4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693d4->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982178 } -0x56c693d4->Object::Object { 0x56c693c4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982178 } + $1 = token 'a' (0x567a13d4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13d4->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } +0x567a13d4->Object::Object { 0x567a13c4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0x56c693d4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693e4->Object::Object { 0x56c693c4, 0x56c693d4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13e4->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338 } Entering state 1 Stack now 0 10 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693e4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693e4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } -0x56c693e4->Object::Object { 0x56c693c4, 0x56c693d4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } + $1 = 
token 'a' (0x567a13e4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13e4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } +0x567a13e4->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693f4->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13f4->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd28338 } Entering state 1 Stack now 0 10 10 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693f4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693f4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982178 } -0x56c693f4->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982178 } + $1 = token 'a' (0x567a13f4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13f4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283f8 } +0x567a13f4->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820bf, 0xff982168 } -Next token is token 'p' (0xff982168 'p'Exception caught: cleaning lookahead and stack -0x56c693f4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982168 } -0x56c693e4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982168 } -0x56c693d4->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982168 } -0x56c693c4->Object::~Object { 0x56c693c4, 0xff982168 } -0xff982168->Object::~Object { 0xff982168 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 
0x567a13e4, 0x567a13f4, 0xffd2833f, 0xffd283e8 } +Next token is token 'p' (0xffd283e8 'p'Exception caught: cleaning lookahead and stack +0x567a13f4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283e8 } +0x567a13e4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283e8 } +0x567a13d4->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283e8 } +0x567a13c4->Object::~Object { 0x567a13c4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0xffd283e8 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff9820bf->Object::Object { } -0xff982168->Object::Object { 0xff9820bf } -0xff9820bf->Object::~Object { 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0xff982168 } -0xff982168->Object::~Object { 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693c4->Object::Object { 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0xff9820b8 } +0xffd2833f->Object::Object { } +0xffd283e8->Object::Object { 0xffd2833f } +0xffd2833f->Object::~Object { 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0xffd283e8 } +0xffd283e8->Object::~Object { 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13c4->Object::Object { 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0xffd28338 } Entering state 1 Stack now 0 1 -0xff982178->Object::Object { 0x56c693c4 } +0xffd283f8->Object::Object { 0x567a13c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693c4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693c4->Object::~Object { 0x56c693c4, 0xff982178 } -0x56c693c4->Object::Object { 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0xff982178 } + $1 = token 'a' (0x567a13c4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13c4->Object::~Object { 0x567a13c4, 0xffd283f8 } +0x567a13c4->Object::Object { 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0xffd283f8 } Entering state 10 Stack now 0 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4 } -0xff982168->Object::Object { 0x56c693c4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693d4->Object::Object { 0x56c693c4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4 } +0xffd283e8->Object::Object { 0x567a13c4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13d4->Object::Object { 0x567a13c4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd28338 } Entering state 1 Stack now 0 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693d4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693d4->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982178 } -0x56c693d4->Object::Object { 
0x56c693c4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982178 } + $1 = token 'a' (0x567a13d4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13d4->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } +0x567a13d4->Object::Object { 0x567a13c4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0x56c693d4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693e4->Object::Object { 0x56c693c4, 0x56c693d4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' (0xffd28338 'a') +0x567a13e4->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338 } Entering state 1 Stack now 0 10 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693e4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693e4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } -0x56c693e4->Object::Object { 0x56c693c4, 0x56c693d4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } + $1 = token 'a' (0x567a13e4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13e4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } +0x567a13e4->Object::Object { 0x567a13c4, 0x567a13d4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820bf, 0xff982168 } -Next token is token 'a' (0xff982168 'a') -0xff9820b8->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982168 } -0xff982168->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8, 0xff982168 } -Shifting token 'a' (0xff9820b8 'a') -0x56c693f4->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff9820b8 } -0xff9820b8->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820b8 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd2833f, 0xffd283e8 } +Next token is token 'a' (0xffd283e8 'a') +0xffd28338->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338, 0xffd283e8 } +Shifting token 'a' 
(0xffd28338 'a') +0x567a13f4->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd28338 } +0xffd28338->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd28338 } Entering state 1 Stack now 0 10 10 10 1 -0xff982178->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4 } +0xffd283f8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56c693f4 'a') --> $$ = nterm item (0xff982178 'a') -0x56c693f4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982178 } -0x56c693f4->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982178 } -0xff982178->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982178 } + $1 = token 'a' (0x567a13f4 'a') +-> $$ = nterm item (0xffd283f8 'a') +0x567a13f4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283f8 } +0x567a13f4->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283f8 } +0xffd283f8->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283f8 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff9820bf->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4 } -0xff982168->Object::Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820bf } -0xff9820bf->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff9820bf, 0xff982168 } -Next token is token 'p' (0xff982168 'p'Exception caught: cleaning lookahead and stack -0x56c693f4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0x56c693f4, 0xff982168 } -0x56c693e4->Object::~Object { 0x56c693c4, 0x56c693d4, 0x56c693e4, 0xff982168 } -0x56c693d4->Object::~Object { 0x56c693c4, 0x56c693d4, 0xff982168 } -0x56c693c4->Object::~Object { 0x56c693c4, 0xff982168 } -0xff982168->Object::~Object { 0xff982168 } +0xffd2833f->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4 } +0xffd283e8->Object::Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd2833f } +0xffd2833f->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd2833f, 0xffd283e8 } +Next token is token 'p' (0xffd283e8 'p'Exception caught: cleaning lookahead and stack +0x567a13f4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0x567a13f4, 0xffd283e8 } +0x567a13e4->Object::~Object { 0x567a13c4, 0x567a13d4, 0x567a13e4, 0xffd283e8 } +0x567a13d4->Object::~Object { 0x567a13c4, 0x567a13d4, 0xffd283e8 } +0x567a13c4->Object::~Object { 0x567a13c4, 0xffd283e8 } +0xffd283e8->Object::~Object { 0xffd283e8 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr @@ -269033,48 +270337,56 @@ exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae stderr: -./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./glr-regression.at:597: $PREPARSER ./glr-regr4 -stderr: ./c++.at:1363: $PREPARSER ./input aaaaE stderr: -stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaT -727. 
glr-regression.at:597: ok stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt -stderr: ./c++.at:1363: $PREPARSER ./input aaaaR -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -719. glr-regression.at:356: ok ======== Testing with C++ standard flags: '' ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:593: $PREPARSER ./glr-regr4 +stderr: +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +724. glr-regression.at:593: ok stderr: stdout: -./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt +./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB stderr: -./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +716. glr-regression.at:207: ok + +730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... +./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... +./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS stderr: -740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... -./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y stdout: -./glr-regression.at:671: $PREPARSER ./glr-regr5 -722. glr-regression.at:490: ok +./glr-regression.at:597: $PREPARSER ./glr-regr4 +stderr: +./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +727. glr-regression.at:597: ok + +stderr: +stderr: +stdout: +stdout: +./glr-regression.at:670: $PREPARSER ./glr-regr5 stderr: +./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt Ambiguity detected. Option 1, start -> @@ -269085,268 +270397,49 @@ 'a' syntax is ambiguous -./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... -./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y -730. glr-regression.at:671: ok - -./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS - -./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS -742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... 
-./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y -743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... -./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./glr-regression.at:843: $PREPARSER ./glr-regr7 -./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +729. glr-regression.at:670: ok +./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt stderr: -memory exhausted -./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -735. glr-regression.at:843: ok -./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... -./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y -./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... +./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y +719. glr-regression.at:356: ok + +733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... 
+./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap +stdout: +./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffb1208f->Object::Object { } -0xffb12138->Object::Object { 0xffb1208f } -0xffb1208f->Object::~Object { 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0xffb12138 } -0xffb12138->Object::~Object { 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3c4->Object::Object { 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0xffb12088 } -Entering state 2 -Stack now 0 2 -0xffb12148->Object::Object { 0x5727a3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3c4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3c4->Object::~Object { 0x5727a3c4, 0xffb12148 } -0x5727a3c4->Object::Object { 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0xffb12148 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4 } -0xffb12138->Object::Object { 0x5727a3c4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3d4->Object::Object { 0x5727a3c4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088 } -Entering state 2 -Stack now 0 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3d4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3d4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -0x5727a3d4->Object::Object { 0x5727a3c4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3e4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12088 } -Entering state 2 -Stack now 0 11 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3e4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3e4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -0x5727a3e4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 
0x5727a3e4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3f4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12088 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3f4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3f4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12148 } -0x5727a3f4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb1208f, 0xffb12138 } -Next token is token 'p' (0xffb12138 'p'Exception caught: cleaning lookahead and stack -0x5727a3f4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12138 } -0x5727a3e4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12138 } -0x5727a3d4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12138 } -0x5727a3c4->Object::~Object { 0x5727a3c4, 0xffb12138 } -0xffb12138->Object::~Object { 0xffb12138 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... +./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +722. glr-regression.at:490: ok + +./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... 
+./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y +./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffb1208f->Object::Object { } -0xffb12138->Object::Object { 0xffb1208f } -0xffb1208f->Object::~Object { 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0xffb12138 } -0xffb12138->Object::~Object { 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3c4->Object::Object { 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0xffb12088 } -Entering state 2 -Stack now 0 2 -0xffb12148->Object::Object { 0x5727a3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3c4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3c4->Object::~Object { 0x5727a3c4, 0xffb12148 } -0x5727a3c4->Object::Object { 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0xffb12148 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4 } -0xffb12138->Object::Object { 0x5727a3c4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3d4->Object::Object { 0x5727a3c4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088 } -Entering state 2 -Stack now 0 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3d4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3d4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -0x5727a3d4->Object::Object { 0x5727a3c4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3e4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12088 } -Entering state 2 -Stack now 0 11 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3e4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3e4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -0x5727a3e4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb1208f, 0xffb12138 } -Next token is token 'a' (0xffb12138 'a') -0xffb12088->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12138 } -0xffb12138->Object::~Object { 0x5727a3c4, 0x5727a3d4, 
0x5727a3e4, 0xffb12088, 0xffb12138 } -Shifting token 'a' (0xffb12088 'a') -0x5727a3f4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12088 } -0xffb12088->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12088 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffb12148->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5727a3f4 'a') --> $$ = nterm item (0xffb12148 'a') -0x5727a3f4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12148 } -0x5727a3f4->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12148 } -0xffb12148->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12148 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffb1208f->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4 } -0xffb12138->Object::Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb1208f } -0xffb1208f->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb1208f, 0xffb12138 } -Next token is token 'p' (0xffb12138 'p'Exception caught: cleaning lookahead and stack -0x5727a3f4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0x5727a3f4, 0xffb12138 } -0x5727a3e4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0x5727a3e4, 0xffb12138 } -0x5727a3d4->Object::~Object { 0x5727a3c4, 0x5727a3d4, 0xffb12138 } -0x5727a3c4->Object::~Object { 0x5727a3c4, 0xffb12138 } -0xffb12138->Object::~Object { 0xffb12138 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE +./c++.at:859: $PREPARSER ./input stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaaT -./glr-regression.at:739: $PREPARSER ./glr-regr6 -stderr: +./glr-regression.at:671: $PREPARSER ./glr-regr5 stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Ambiguity detected. Option 1, start -> @@ -269357,171 +270450,63 @@ 'a' syntax is ambiguous -./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -733. glr-regression.at:739: ok -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +730. glr-regression.at:671: ok -745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... -./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... 
+./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS stderr: stdout: -./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -./glr-regression.at:944: $PREPARSER ./glr-regr8 -stderr: -./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -738. glr-regression.at:944: ok +./glr-regression.at:738: $PREPARSER ./glr-regr6 stderr: -stdout: -./c++.at:573: $here/modern +Ambiguity detected. +Option 1, + start -> + 'a' -stdout: -Modern C++: 202302 -./c++.at:573: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -./glr-regression.at:844: $PREPARSER ./glr-regr7 -746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... -stderr: -stdout: -./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y -memory exhausted -./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:851: $PREPARSER ./input -stderr: -stderr: -stdout: -./glr-regression.at:1036: $PREPARSER ./glr-regr9 -stderr: -736. glr-regression.at:844: ok -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -741. glr-regression.at:1036: ok +Option 2, + start -> + 'a' +syntax is ambiguous +./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +732. glr-regression.at:738: ok stderr: -./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -673. c++.at:573: ok -747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... -./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y -stderr: stdout: -./c++.at:1555: $PREPARSER ./test +./glr-regression.at:843: $PREPARSER ./glr-regr7 stderr: +memory exhausted +./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +735. glr-regression.at:843: ok -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... 
-./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... +./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... +./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y +./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS stderr: stdout: -./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS ./glr-regression.at:594: $PREPARSER ./glr-regr4 stderr: -./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS -stderr: +./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS ./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:574: $here/modern -stderr: 725. glr-regression.at:594: ok -stdout: -stdout: -./glr-regression.at:1102: $PREPARSER ./glr-regr10 -Modern C++: 202302 -./c++.at:574: $PREPARSER ./list -stderr: -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... -./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y -744. glr-regression.at:1102: ok -691. c++.at:1517: ok - - -./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS -750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... -./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y -674. c++.at:574: ok -./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS -751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... -./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... -./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS -./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS -753. 
glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... -./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y -./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS +739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... +./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y stderr: stdout: -./glr-regression.at:945: $PREPARSER ./glr-regr8 +./c++.at:859: $PREPARSER ./input stderr: -./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -739. glr-regression.at:945: ok - +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS stderr: stdout: -./glr-regression.at:672: $PREPARSER ./glr-regr5 +./glr-regression.at:739: $PREPARSER ./glr-regr6 stderr: Ambiguity detected. Option 1, @@ -269533,94 +270518,46 @@ 'a' syntax is ambiguous -./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... -./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y -731. glr-regression.at:672: ok - -./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS -755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... -./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +733. glr-regression.at:739: ok stderr: stdout: ./glr-regression.at:598: $PREPARSER ./glr-regr4 stderr: -stderr: -stdout: -./glr-regression.at:1037: $PREPARSER ./glr-regr9 + ./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 728. glr-regression.at:598: ok -742. glr-regression.at:1037: ok -./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS - +740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... +./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... 
+./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y +./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS stderr: stdout: +./glr-regression.at:944: $PREPARSER ./glr-regr8 stderr: -./glr-regression.at:1174: $PREPARSER ./glr-regr11 -stderr: -./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:1103: $PREPARSER ./glr-regr10 -747. glr-regression.at:1174: ok -stderr: -./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... -./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -745. glr-regression.at:1103: ok -756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... - -./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y +./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +738. glr-regression.at:944: ok -./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS -./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS -stderr: -759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... -./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y -stdout: -758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... -./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./glr-regression.at:1310: $PREPARSER ./glr-regr12 -./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS -./c++.at:1066: $PREPARSER ./input < in -stderr: -./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:844: $PREPARSER ./glr-regr7 +742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... +./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y stderr: -error: invalid expression -750. glr-regression.at:1310: ok -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +736. 
glr-regression.at:844: ok -./c++.at:1066: $PREPARSER ./input < in -./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... -./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y -stderr: -stdout: -./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS -./c++.at:1363: $PREPARSER ./input aaaas -stderr: +./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./glr-regression.at:740: $PREPARSER ./glr-regr6 -./c++.at:1363: $PREPARSER ./input aaaal +./glr-regression.at:672: $PREPARSER ./glr-regr5 stderr: +743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... +./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y Ambiguity detected. Option 1, start -> @@ -269631,412 +270568,536 @@ 'a' syntax is ambiguous +./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +731. glr-regression.at:672: ok +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +stdout: +stderr: +./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaal + stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -684. c++.at:1066: ok -./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input i +./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS stderr: -734. 
glr-regression.at:740: ok exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ./c++.at:1363: $PREPARSER ./input aaaap stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff83638f->Object::Object { } -0xff836438->Object::Object { 0xff83638f } -0xff83638f->Object::~Object { 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0xff836438 } -0xff836438->Object::~Object { 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273c4->Object::Object { 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0xff836388 } +0xffd8044f->Object::Object { } +0xffd804f8->Object::Object { 0xffd8044f } +0xffd8044f->Object::~Object { 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0xffd804f8 } +0xffd804f8->Object::~Object { 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3c4->Object::Object { 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0xffd80448 } Entering state 1 Stack now 0 1 -0xff836448->Object::Object { 0x56e273c4 } +0xffd80508->Object::Object { 0x5732b3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273c4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273c4->Object::~Object { 0x56e273c4, 0xff836448 } -0x56e273c4->Object::Object { 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0xff836448 } + $1 = token 'a' (0x5732b3c4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3c4->Object::~Object { 0x5732b3c4, 0xffd80508 } +0x5732b3c4->Object::Object { 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0xffd80508 } Entering state 10 Stack now 0 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4 } -0xff836438->Object::Object { 0x56e273c4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273d4->Object::Object { 0x56e273c4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3d4->Object::Object { 0x5732b3c4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448 } Entering state 1 Stack now 0 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273d4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273d4->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836448 } -0x56e273d4->Object::Object { 0x56e273c4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836448 } + $1 = token 'a' (0x5732b3d4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3d4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } +0x5732b3d4->Object::Object { 0x5732b3c4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } 
Entering state 10 Stack now 0 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4 } -0xff836438->Object::Object { 0x56e273c4, 0x56e273d4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273e4->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3e4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448 } Entering state 1 Stack now 0 10 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273e4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273e4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } -0x56e273e4->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } + $1 = token 'a' (0x5732b3e4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3e4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } +0x5732b3e4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4 } -0xff836438->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273f4->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3f4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80448 } Entering state 1 Stack now 0 10 10 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4, 
0x5732b3e4, 0x5732b3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273f4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273f4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836448 } -0x56e273f4->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836448 } + $1 = token 'a' (0x5732b3f4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3f4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80508 } +0x5732b3f4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80508 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4 } -0xff836438->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff83638f, 0xff836438 } -Next token is token 'p' (0xff836438 'p'Exception caught: cleaning lookahead and stack -0x56e273f4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836438 } -0x56e273e4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836438 } -0x56e273d4->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836438 } -0x56e273c4->Object::~Object { 0x56e273c4, 0xff836438 } -0xff836438->Object::~Object { 0xff836438 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd8044f, 0xffd804f8 } +Next token is token 'p' (0xffd804f8 'p'Exception caught: cleaning lookahead and stack +0x5732b3f4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd804f8 } +0x5732b3e4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd804f8 } +0x5732b3d4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd804f8 } +0x5732b3c4->Object::~Object { 0x5732b3c4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0xffd804f8 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... 
+./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff83638f->Object::Object { } -0xff836438->Object::Object { 0xff83638f } -0xff83638f->Object::~Object { 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0xff836438 } -0xff836438->Object::~Object { 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273c4->Object::Object { 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0xff836388 } +0xffd8044f->Object::Object { } +0xffd804f8->Object::Object { 0xffd8044f } +0xffd8044f->Object::~Object { 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0xffd804f8 } +0xffd804f8->Object::~Object { 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3c4->Object::Object { 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0xffd80448 } Entering state 1 Stack now 0 1 -0xff836448->Object::Object { 0x56e273c4 } +0xffd80508->Object::Object { 0x5732b3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273c4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273c4->Object::~Object { 0x56e273c4, 0xff836448 } -0x56e273c4->Object::Object { 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0xff836448 } + $1 = token 'a' (0x5732b3c4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3c4->Object::~Object { 0x5732b3c4, 0xffd80508 } +0x5732b3c4->Object::Object { 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0xffd80508 } Entering state 10 Stack now 0 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4 } -0xff836438->Object::Object { 0x56e273c4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273d4->Object::Object { 0x56e273c4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3d4->Object::Object { 0x5732b3c4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448 } Entering state 1 Stack now 0 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273d4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273d4->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836448 } -0x56e273d4->Object::Object { 0x56e273c4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836448 } + $1 = token 'a' (0x5732b3d4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3d4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } +0x5732b3d4->Object::Object { 0x5732b3c4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } Entering state 10 Stack now 0 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4 } -0xff836438->Object::Object { 0x56e273c4, 
0x56e273d4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273e4->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3e4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448 } Entering state 1 Stack now 0 10 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273e4 'a') --> $$ = nterm item (0xff836448 'a') -0x56e273e4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } -0x56e273e4->Object::Object { 0x56e273c4, 0x56e273d4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } + $1 = token 'a' (0x5732b3e4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3e4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } +0x5732b3e4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4 } -0xff836438->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff83638f, 0xff836438 } -Next token is token 'a' (0xff836438 'a') -0xff836388->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836438 } -0xff836438->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388, 0xff836438 } -Shifting token 'a' (0xff836388 'a') -0x56e273f4->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836388 } -0xff836388->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836388 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd8044f, 0xffd804f8 } +Next token is token 'a' (0xffd804f8 'a') +0xffd80448->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448, 0xffd804f8 } +Shifting token 'a' (0xffd80448 'a') +0x5732b3f4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80448 } +0xffd80448->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80448 } Entering state 1 Stack now 0 10 10 10 1 -0xff836448->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4 } +0xffd80508->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e273f4 'a') --> $$ = nterm item (0xff836448 'a') 
-0x56e273f4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836448 } -0x56e273f4->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836448 } -0xff836448->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836448 } + $1 = token 'a' (0x5732b3f4 'a') +-> $$ = nterm item (0xffd80508 'a') +0x5732b3f4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80508 } +0x5732b3f4->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd80508 } +0xffd80508->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd80508 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff83638f->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4 } -0xff836438->Object::Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff83638f } -0xff83638f->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff83638f, 0xff836438 } -Next token is token 'p' (0xff836438 'p'Exception caught: cleaning lookahead and stack -0x56e273f4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0x56e273f4, 0xff836438 } -0x56e273e4->Object::~Object { 0x56e273c4, 0x56e273d4, 0x56e273e4, 0xff836438 } -0x56e273d4->Object::~Object { 0x56e273c4, 0x56e273d4, 0xff836438 } -0x56e273c4->Object::~Object { 0x56e273c4, 0xff836438 } -0xff836438->Object::~Object { 0xff836438 } +0xffd8044f->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4 } +0xffd804f8->Object::Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd8044f } +0xffd8044f->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd8044f, 0xffd804f8 } +Next token is token 'p' (0xffd804f8 'p'Exception caught: cleaning lookahead and stack +0x5732b3f4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0x5732b3f4, 0xffd804f8 } +0x5732b3e4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0x5732b3e4, 0xffd804f8 } +0x5732b3d4->Object::~Object { 0x5732b3c4, 0x5732b3d4, 0xffd804f8 } +0x5732b3c4->Object::~Object { 0x5732b3c4, 0xffd804f8 } +0xffd804f8->Object::~Object { 0xffd804f8 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae -./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y stderr: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... 
-./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y +./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS ./c++.at:1363: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaT stderr: -./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaR stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1036: $PREPARSER ./glr-regr9 +stderr: +memory exhausted +./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +741. glr-regression.at:1036: ok + +745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... +./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +688. c++.at:1363: ok +stderr: +stdout: +./glr-regression.at:945: $PREPARSER ./glr-regr8 + +stderr: +./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +739. glr-regression.at:945: ok + +746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... +./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... +./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y +./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +stderr: +stdout: +./glr-regression.at:740: $PREPARSER ./glr-regr6 +stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +734. glr-regression.at:740: ok +./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS + +748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... 
+./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./glr-regression.at:1102: $PREPARSER ./glr-regr10 +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' -./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +744. glr-regression.at:1102: ok + +749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... +./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stderr: stdout: -./glr-regression.at:1175: $PREPARSER ./glr-regr11 +./glr-regression.at:1037: $PREPARSER ./glr-regr9 stderr: -./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -748. glr-regression.at:1175: ok +memory exhausted +./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +742. glr-regression.at:1037: ok + +750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... +./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y +./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS +stderr: +stdout: +./glr-regression.at:1174: $PREPARSER ./glr-regr11 +stderr: +./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +747. glr-regression.at:1174: ok -763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... -./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y stderr: stdout: ./glr-regression.at:845: $PREPARSER ./glr-regr7 +751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... +./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y stderr: memory exhausted ./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./glr-regression.at:1445: $PREPARSER ./glr-regr13 737. glr-regression.at:845: ok + +./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... 
+./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS stderr: -./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS -./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:1103: $PREPARSER ./glr-regr10 stderr: +./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +745. glr-regression.at:1103: ok +stderr: stdout: -753. glr-regression.at:1445: ok -./glr-regression.at:1311: $PREPARSER ./glr-regr12 +./c++.at:859: $PREPARSER ./input stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... +./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y +./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS stderr: stdout: -./glr-regression.at:1785: $PREPARSER ./glr-regr15 -./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -751. glr-regression.at:1311: ok +./glr-regression.at:1310: $PREPARSER ./glr-regr12 stderr: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> +./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +750. glr-regression.at:1310: ok +stderr: +stdout: +./glr-regression.at:1175: $PREPARSER ./glr-regr11 -Option 2, - ambiguity -> - ambiguity2 -> +stderr: +./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +748. glr-regression.at:1175: ok -syntax is ambiguous -./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... +./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... +./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +stderr: +stdout: +./glr-regression.at:946: $PREPARSER ./glr-regr8 +stderr: +./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +740. glr-regression.at:946: ok +stderr: +stdout: +./glr-regression.at:1038: $PREPARSER ./glr-regr9 +stderr: +memory exhausted +./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... 
+./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y +743. glr-regression.at:1038: ok -759. glr-regression.at:1785: ok +./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS +757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... +./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y +./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +stderr: +stderr: +stdout: +./glr-regression.at:1445: $PREPARSER ./glr-regr13 +stdout: +stderr: +./c++.at:859: $PREPARSER ./input +stderr: +./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +753. glr-regression.at:1445: ok +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... -765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... -./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y stderr: stdout: -./c++.at:857: $PREPARSER ./input +./glr-regression.at:1311: $PREPARSER ./glr-regr12 stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... -./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +751. glr-regression.at:1311: ok + +758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... +./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y +759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... 
+./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y +./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS stderr: stdout: -./c++.at:851: $PREPARSER ./input +./glr-regression.at:1104: $PREPARSER ./glr-regr10 stderr: -./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS -./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS -767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... -./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +746. glr-regression.at:1104: ok stdout: ./glr-regression.at:1678: $PREPARSER ./glr-regr14 stderr: -./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS ./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 756. glr-regression.at:1678: ok -stderr: -678. c++.at:848: ok -stdout: -./glr-regression.at:946: $PREPARSER ./glr-regr8 -stderr: + +760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... +./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... +./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS stderr: stdout: -./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:1446: $PREPARSER ./glr-regr13 stderr: -740. glr-regression.at:946: ok - ./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 754. glr-regression.at:1446: ok -768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... -./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y - -768. glr-regression.at:2035: ok -769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... 
-./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... +./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y +stderr: +stdout: +./glr-regression.at:1785: $PREPARSER ./glr-regr15 +stderr: +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> -769. glr-regression.at:2036: ok -770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... -./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +Option 2, + ambiguity -> + ambiguity2 -> -771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... -770. glr-regression.at:2037: ok -./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +syntax is ambiguous +./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +759. glr-regression.at:1785: ok +./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS -772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... -./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... -./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./glr-regression.at:1860: $PREPARSER ./glr-regr16 +./glr-regression.at:1176: $PREPARSER ./glr-regr11 stderr: -syntax error -./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -774. glr-regression.at:2229: testing Predicates: glr.c ... -./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -762. glr-regression.at:1860: ok -./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +749. glr-regression.at:1176: ok -./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... +./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y +./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... 
+./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y stderr: stdout: +./c++.at:859: $PREPARSER ./input stderr: -stdout: -./glr-regression.at:1038: $PREPARSER ./glr-regr9 -./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS stderr: +stdout: +./glr-regression.at:1679: $PREPARSER ./glr-regr14 stderr: -memory exhausted -./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal +./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +757. glr-regression.at:1679: ok + +765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... +./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y +./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS stderr: -743. glr-regression.at:1038: ok -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i -775. glr-regression.at:2230: testing Predicates: glr.cc ... -./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./glr-regression.at:1860: $PREPARSER ./glr-regr16 stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - stdout: -./glr-regression.at:1679: $PREPARSER ./glr-regr14 -./c++.at:1362: $PREPARSER ./input aaaap +syntax error +./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1312: $PREPARSER ./glr-regr12 stderr: +762. glr-regression.at:1860: ok +./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +752. glr-regression.at:1312: ok + + +767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... +./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... +./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS stderr: stdout: -./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -757. glr-regression.at:1679: ok -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:1786: $PREPARSER ./glr-regr15 stderr: Ambiguity detected. 
@@ -270049,233 +271110,31 @@ ambiguity2 -> syntax is ambiguous -./c++.at:1362: $PREPARSER ./input --debug aaaap ./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: 760. glr-regression.at:1786: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffc4475f->Object::Object { } -0xffc44808->Object::Object { 0xffc4475f } -0xffc4475f->Object::~Object { 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0xffc44808 } -0xffc44808->Object::~Object { 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833c4->Object::Object { 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0xffc44758 } -Entering state 2 -Stack now 0 2 -0xffc44818->Object::Object { 0x579833c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833c4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833c4->Object::~Object { 0x579833c4, 0xffc44818 } -0x579833c4->Object::Object { 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0xffc44818 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4 } -0xffc44808->Object::Object { 0x579833c4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833d4->Object::Object { 0x579833c4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44758 } -Entering state 2 -Stack now 0 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833d4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833d4->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44818 } -0x579833d4->Object::Object { 0x579833c4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0x579833d4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833e4->Object::Object { 0x579833c4, 0x579833d4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44758 } -Entering state 2 -Stack now 0 11 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833e4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833e4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -0x579833e4->Object::Object { 0x579833c4, 0x579833d4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 
0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833f4->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44758 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833f4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833f4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44818 } -0x579833f4->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc4475f, 0xffc44808 } -Next token is token 'p' (0xffc44808 'p'Exception caught: cleaning lookahead and stack -0x579833f4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44808 } -0x579833e4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44808 } -0x579833d4->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44808 } -0x579833c4->Object::~Object { 0x579833c4, 0xffc44808 } -0xffc44808->Object::~Object { 0xffc44808 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... +./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +768. glr-regression.at:2035: ok + +769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... +./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +769. glr-regression.at:2036: ok + +770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... +./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +770. 
glr-regression.at:2037: ok + stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffc4475f->Object::Object { } -0xffc44808->Object::Object { 0xffc4475f } -0xffc4475f->Object::~Object { 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0xffc44808 } -0xffc44808->Object::~Object { 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833c4->Object::Object { 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0xffc44758 } -Entering state 2 -Stack now 0 2 -0xffc44818->Object::Object { 0x579833c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833c4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833c4->Object::~Object { 0x579833c4, 0xffc44818 } -0x579833c4->Object::Object { 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0xffc44818 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4 } -0xffc44808->Object::Object { 0x579833c4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833d4->Object::Object { 0x579833c4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44758 } -Entering state 2 -Stack now 0 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833d4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833d4->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44818 } -0x579833d4->Object::Object { 0x579833c4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0x579833d4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833e4->Object::Object { 0x579833c4, 0x579833d4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44758 } -Entering state 2 -Stack now 0 11 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833e4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833e4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -0x579833e4->Object::Object { 0x579833c4, 0x579833d4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc4475f, 0xffc44808 } -Next token is token 'a' (0xffc44808 'a') -0xffc44758->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44808 } -0xffc44808->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44758, 0xffc44808 } -Shifting token 'a' (0xffc44758 'a') -0x579833f4->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44758 } -0xffc44758->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 
0x579833f4, 0xffc44758 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffc44818->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x579833f4 'a') --> $$ = nterm item (0xffc44818 'a') -0x579833f4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44818 } -0x579833f4->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44818 } -0xffc44818->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44818 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffc4475f->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4 } -0xffc44808->Object::Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc4475f } -0xffc4475f->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc4475f, 0xffc44808 } -Next token is token 'p' (0xffc44808 'p'Exception caught: cleaning lookahead and stack -0x579833f4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0x579833f4, 0xffc44808 } -0x579833e4->Object::~Object { 0x579833c4, 0x579833d4, 0x579833e4, 0xffc44808 } -0x579833d4->Object::~Object { 0x579833c4, 0x579833d4, 0xffc44808 } -0x579833c4->Object::~Object { 0x579833c4, 0xffc44808 } -0xffc44808->Object::~Object { 0xffc44808 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -776. glr-regression.at:2231: testing Predicates: glr2.cc ... -./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR +./glr-regression.at:1447: $PREPARSER ./glr-regr13 stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +755. glr-regression.at:1447: 771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... +./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + ok + +./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./glr-regression.at:1964: $PREPARSER ./glr-regr17 @@ -270301,20 +271160,13 @@ 1.1-2.2: syntax is ambiguous ./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: 765. glr-regression.at:1964: ok -./glr-regression.at:1176: $PREPARSER ./glr-regr11 -stderr: -./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -749. 
glr-regression.at:1176: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... +./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + +./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... +./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./glr-regression.at:1861: $PREPARSER ./glr-regr16 @@ -270322,17 +271174,23 @@ syntax error ./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 763. glr-regression.at:1861: ok + +./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +774. glr-regression.at:2229: testing Predicates: glr.c ... +./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./glr-regression.at:2229: $PREPARSER ./input Nwin +./c++.at:859: $PREPARSER ./input stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./glr-regression.at:2229: $PREPARSER ./input Owin stdout: ./glr-regression.at:2149: $PREPARSER ./input --debug stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -270393,44 +271251,41 @@ Cleanup: popping nterm b () Cleanup: popping token 'a' () ./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owio -stderr: 771. glr-regression.at:2149: ok -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Nwio -stderr: -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -774. glr-regression.at:2229: ok -stderr: -stdout: -./glr-regression.at:1104: $PREPARSER ./glr-regr10 -stderr: -./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -746. glr-regression.at:1104: ok + +775. glr-regression.at:2230: testing Predicates: glr.cc ... 
+./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./glr-regression.at:1312: $PREPARSER ./glr-regr12 +./glr-regression.at:1787: $PREPARSER ./glr-regr15 stderr: -./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -752. glr-regression.at:1312: ok +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +761. glr-regression.at:1787: ok + +./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1680: $PREPARSER ./glr-regr14 stderr: +./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +758. glr-regression.at:1680: ok +776. glr-regression.at:2231: testing Predicates: glr2.cc ... +./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -stdout: -./glr-regression.at:1447: $PREPARSER ./glr-regr13 ./glr-regression.at:1965: $PREPARSER ./glr-regr17 stderr: -stderr: +./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Ambiguity detected. Option 1, start -> @@ -270452,12 +271307,27 @@ 1.1-2.2: syntax is ambiguous ./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +766. glr-regression.at:1965: ok +stderr: +stdout: +./glr-regression.at:2229: $PREPARSER ./input Nwin +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owin +stderr: +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owio +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Nwio +stderr: +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +774. glr-regression.at:2229: ok stderr: -755. glr-regression.at:1447: ok stdout: ./glr-regression.at:2150: $PREPARSER ./input --debug -766. glr-regression.at:1965: ok stderr: Starting parse Entering state 0 @@ -270522,242 +271392,18 @@ 772. 
glr-regression.at:2150: ok stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +./glr-regression.at:1862: $PREPARSER ./glr-regr16 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xfff6791f->Object::Object { } -0xfff679c8->Object::Object { 0xfff6791f } -0xfff6791f->Object::~Object { 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0xfff679c8 } -0xfff679c8->Object::~Object { 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943c4->Object::Object { 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0xfff67918 } -Entering state 1 -Stack now 0 1 -0xfff679d8->Object::Object { 0x57e943c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943c4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943c4->Object::~Object { 0x57e943c4, 0xfff679d8 } -0x57e943c4->Object::Object { 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4 } -0xfff679c8->Object::Object { 0x57e943c4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943d4->Object::Object { 0x57e943c4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff67918 } -Entering state 1 -Stack now 0 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943d4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943d4->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -0x57e943d4->Object::Object { 0x57e943c4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943e4->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918 } -Entering state 1 -Stack now 0 10 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943e4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943e4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679d8 } -0x57e943e4->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 
0x57e943d4, 0x57e943e4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943f4->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff67918 } -Entering state 1 -Stack now 0 10 10 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943f4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943f4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679d8 } -0x57e943f4->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff6791f, 0xfff679c8 } -Next token is token 'p' (0xfff679c8 'p'Exception caught: cleaning lookahead and stack -0x57e943f4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679c8 } -0x57e943e4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679c8 } -0x57e943d4->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679c8 } -0x57e943c4->Object::~Object { 0x57e943c4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0xfff679c8 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +764. 
glr-regression.at:1862: ok stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xfff6791f->Object::Object { } -0xfff679c8->Object::Object { 0xfff6791f } -0xfff6791f->Object::~Object { 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0xfff679c8 } -0xfff679c8->Object::~Object { 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943c4->Object::Object { 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0xfff67918 } -Entering state 1 -Stack now 0 1 -0xfff679d8->Object::Object { 0x57e943c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943c4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943c4->Object::~Object { 0x57e943c4, 0xfff679d8 } -0x57e943c4->Object::Object { 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4 } -0xfff679c8->Object::Object { 0x57e943c4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943d4->Object::Object { 0x57e943c4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff67918 } -Entering state 1 -Stack now 0 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943d4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943d4->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -0x57e943d4->Object::Object { 0x57e943c4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943e4->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918 } -Entering state 1 -Stack now 0 10 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943e4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943e4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679d8 } -0x57e943e4->Object::Object { 0x57e943c4, 0x57e943d4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff6791f, 0xfff679c8 } -Next token is token 'a' (0xfff679c8 'a') -0xfff67918->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918, 0xfff679c8 } -Shifting token 'a' (0xfff67918 'a') -0x57e943f4->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff67918 } -0xfff67918->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 
0x57e943f4, 0xfff67918 } -Entering state 1 -Stack now 0 10 10 10 1 -0xfff679d8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57e943f4 'a') --> $$ = nterm item (0xfff679d8 'a') -0x57e943f4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679d8 } -0x57e943f4->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679d8 } -0xfff679d8->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679d8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xfff6791f->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4 } -0xfff679c8->Object::Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff6791f } -0xfff6791f->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff6791f, 0xfff679c8 } -Next token is token 'p' (0xfff679c8 'p'Exception caught: cleaning lookahead and stack -0x57e943f4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0x57e943f4, 0xfff679c8 } -0x57e943e4->Object::~Object { 0x57e943c4, 0x57e943d4, 0x57e943e4, 0xfff679c8 } -0x57e943d4->Object::~Object { 0x57e943c4, 0x57e943d4, 0xfff679c8 } -0x57e943c4->Object::~Object { 0x57e943c4, 0xfff679c8 } -0xfff679c8->Object::~Object { 0xfff679c8 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:860: $PREPARSER ./input stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./glr-regression.at:2230: $PREPARSER ./input Nwin @@ -270777,42 +271423,6 @@ 775. glr-regression.at:2230: ok stderr: stdout: -./glr-regression.at:1680: $PREPARSER ./glr-regr14 -stderr: -./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -758. glr-regression.at:1680: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -stderr: -stdout: -./glr-regression.at:1862: $PREPARSER ./glr-regr16 -./glr-regression.at:1787: $PREPARSER ./glr-regr15 -stderr: -syntax error -./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -764. glr-regression.at:1862: ok -761. 
glr-regression.at:1787: ok -stderr: -stdout: ./glr-regression.at:1966: $PREPARSER ./glr-regr17 stderr: Ambiguity detected. @@ -270839,6 +271449,13 @@ 767. glr-regression.at:1966: ok stderr: stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: ./glr-regression.at:2151: $PREPARSER ./input --debug stderr: Starting parse @@ -270904,22 +271521,22 @@ 773. glr-regression.at:2151: ok stderr: stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: ./glr-regression.at:2231: $PREPARSER ./input Nwin stderr: ./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:2231: $PREPARSER ./input Owin stderr: -stderr: syntax error, unexpected 'n', expecting 'o' ./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:858: $PREPARSER ./input ./glr-regression.at:2231: $PREPARSER ./input Owio stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: ./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:2231: $PREPARSER ./input Nwio stderr: @@ -270928,587 +271545,6 @@ 776. glr-regression.at:2231: ok stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffa7cb7f->Object::Object { } -0xffa7cc28->Object::Object { 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23c4->Object::Object { 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0xffa7cb78 } -Entering state 2 -Stack now 0 2 -0xffa7cc38->Object::Object { 0x572d23c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23c4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23c4->Object::~Object { 0x572d23c4, 0xffa7cc38 } -0x572d23c4->Object::Object { 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 
0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23d4->Object::Object { 0x572d23c4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23d4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23d4->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -0x572d23d4->Object::Object { 0x572d23c4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23e4->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23e4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23e4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -0x572d23e4->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23f4->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23f4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23f4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc38 } -0x572d23f4->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'p' (0xffa7cc28 'p'Exception caught: cleaning lookahead and stack -0x572d23f4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc28 } -0x572d23e4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc28 } -0x572d23d4->Object::~Object { 0x572d23c4, 
0x572d23d4, 0xffa7cc28 } -0x572d23c4->Object::~Object { 0x572d23c4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0xffa7cc28 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffa7cb7f->Object::Object { } -0xffa7cc28->Object::Object { 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23c4->Object::Object { 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0xffa7cb78 } -Entering state 2 -Stack now 0 2 -0xffa7cc38->Object::Object { 0x572d23c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23c4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23c4->Object::~Object { 0x572d23c4, 0xffa7cc38 } -0x572d23c4->Object::Object { 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23d4->Object::Object { 0x572d23c4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23d4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23d4->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -0x572d23d4->Object::Object { 0x572d23c4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23e4->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23e4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23e4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -0x572d23e4->Object::Object { 0x572d23c4, 0x572d23d4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'a' (0xffa7cc28 'a') -0xffa7cb78->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 
0xffa7cb78, 0xffa7cc28 } -Shifting token 'a' (0xffa7cb78 'a') -0x572d23f4->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cb78 } -0xffa7cb78->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb78 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffa7cc38->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x572d23f4 'a') --> $$ = nterm item (0xffa7cc38 'a') -0x572d23f4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc38 } -0x572d23f4->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc38 } -0xffa7cc38->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc38 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xffa7cb7f->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4 } -0xffa7cc28->Object::Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb7f } -0xffa7cb7f->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cb7f, 0xffa7cc28 } -Next token is token 'p' (0xffa7cc28 'p'Exception caught: cleaning lookahead and stack -0x572d23f4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0x572d23f4, 0xffa7cc28 } -0x572d23e4->Object::~Object { 0x572d23c4, 0x572d23d4, 0x572d23e4, 0xffa7cc28 } -0x572d23d4->Object::~Object { 0x572d23c4, 0x572d23d4, 0xffa7cc28 } -0x572d23c4->Object::~Object { 0x572d23c4, 0xffa7cc28 } -0xffa7cc28->Object::~Object { 0xffa7cc28 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -687. 
c++.at:1362: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffb2bd0f->Object::Object { } -0xffb2bdb8->Object::Object { 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3c4->Object::Object { 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0xffb2bd08 } -Entering state 1 -Stack now 0 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3c4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3c4->Object::~Object { 0x57bfd3c4, 0xffb2bdc8 } -0x57bfd3c4->Object::Object { 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3d4->Object::Object { 0x57bfd3c4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3d4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3d4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -0x57bfd3d4->Object::Object { 0x57bfd3c4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3e4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3e4 'a') --> $$ = nterm item (0xffb2bdc8 
'a') -0x57bfd3e4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -0x57bfd3e4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3f4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3f4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3f4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdc8 } -0x57bfd3f4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'p' (0xffb2bdb8 'p'Exception caught: cleaning lookahead and stack -0x57bfd3f4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdb8 } -0x57bfd3e4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdb8 } -0x57bfd3d4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdb8 } -0x57bfd3c4->Object::~Object { 0x57bfd3c4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0xffb2bdb8 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffb2bd0f->Object::Object { } -0xffb2bdb8->Object::Object { 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3c4->Object::Object { 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0xffb2bd08 } -Entering state 1 -Stack now 0 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3c4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3c4->Object::~Object { 0x57bfd3c4, 0xffb2bdc8 } -0x57bfd3c4->Object::Object { 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' 
(0xffb2bd08 'a') -0x57bfd3d4->Object::Object { 0x57bfd3c4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3d4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3d4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -0x57bfd3d4->Object::Object { 0x57bfd3c4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3e4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3e4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3e4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -0x57bfd3e4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'a' (0xffb2bdb8 'a') -0xffb2bd08->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdb8 } -0xffb2bdb8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08, 0xffb2bdb8 } -Shifting token 'a' (0xffb2bd08 'a') -0x57bfd3f4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bd08 } -0xffb2bd08->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd08 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffb2bdc8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bfd3f4 'a') --> $$ = nterm item (0xffb2bdc8 'a') -0x57bfd3f4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdc8 } -0x57bfd3f4->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdc8 } -0xffb2bdc8->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdc8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffb2bd0f->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4 } -0xffb2bdb8->Object::Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd0f } -0xffb2bd0f->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bd0f, 0xffb2bdb8 } -Next token is token 'p' (0xffb2bdb8 'p'Exception caught: cleaning lookahead and stack -0x57bfd3f4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0x57bfd3f4, 0xffb2bdb8 } -0x57bfd3e4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0x57bfd3e4, 0xffb2bdb8 } -0x57bfd3d4->Object::~Object { 0x57bfd3c4, 0x57bfd3d4, 0xffb2bdb8 } -0x57bfd3c4->Object::~Object { 0x57bfd3c4, 0xffb2bdb8 } 
-0xffb2bdb8->Object::~Object { 0xffb2bdb8 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -688. c++.at:1363: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: ./c++.at:860: $PREPARSER ./input stderr: ./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -271558,7 +271594,7 @@ dh_testroot dh_prep dh_auto_install - make -j22 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no + make -j10 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' make install-recursive make[2]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' @@ -271566,10 +271602,8 @@ make[3]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg/po' installing bg.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/bg/LC_MESSAGES/bison.mo installing ca.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ca/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing da.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/da/LC_MESSAGES/bison.mo installing de.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/de/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing el.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/el/LC_MESSAGES/bison.mo installing eo.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/eo/LC_MESSAGES/bison.mo installing es.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/es/LC_MESSAGES/bison.mo @@ -271579,29 +271613,22 @@ installing ga.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ga/LC_MESSAGES/bison.mo installing hr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/hr/LC_MESSAGES/bison.mo installing id.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/id/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing it.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/it/LC_MESSAGES/bison.mo installing ja.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ja/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! 
installing ms.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ms/LC_MESSAGES/bison.mo installing nb.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nb/LC_MESSAGES/bison.mo installing nl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nl/LC_MESSAGES/bison.mo installing pl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pl/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing pt.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pt/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing pt_BR.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo installing ro.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ro/LC_MESSAGES/bison.mo installing ru.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ru/LC_MESSAGES/bison.mo installing sr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sr/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing sv.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sv/LC_MESSAGES/bison.mo installing tr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/tr/LC_MESSAGES/bison.mo installing uk.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/uk/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing vi.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/vi/LC_MESSAGES/bison.mo installing zh_CN.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo -libfakeroot internal error: payload not recognized! installing zh_TW.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo if test "bison" = "gettext-tools"; then \ /usr/bin/mkdir -p /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ @@ -271627,12 +271654,10 @@ installing eo.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo installing es.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo installing et.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing fi.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo installing fr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo installing ga.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo installing gl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! 
installing hr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo installing hu.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo installing ia.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo @@ -271643,7 +271668,6 @@ installing lt.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo installing lv.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo installing ms.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing nb.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo installing nl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo installing pl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo @@ -271653,18 +271677,13 @@ installing ru.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo installing sl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo installing sq.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing sr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing sv.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo installing ta.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing th.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing tr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo installing uk.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo installing vi.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo -libfakeroot internal error: payload not recognized! installing zh_CN.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo installing zh_TW.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo if test "bison" = "gettext-tools"; then \ @@ -271683,13 +271702,9 @@ Making install in gnulib-po make[3]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg/gnulib-po' installing af.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing be.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! 
installing bg.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing ca.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing cs.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo installing da.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo installing de.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo @@ -271701,35 +271716,28 @@ installing fi.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo installing fr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo installing ga.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing gl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo installing hu.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo installing it.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo installing ja.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo installing ko.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo installing ms.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing nb.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo installing nl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo installing pl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing pt.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo installing pt_BR.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! installing ro.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo installing ru.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo installing rw.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo -libfakeroot internal error: payload not recognized! 
 installing sk.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
 installing sl.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo
 installing sr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
 installing sv.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
-libfakeroot internal error: payload not recognized!
 installing tr.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
 installing uk.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
 installing vi.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
 installing zh_CN.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
-libfakeroot internal error: payload not recognized!
 installing zh_TW.gmo as /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
 if test "bison" = "gettext-tools"; then \
 /usr/bin/mkdir -p /build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \
@@ -271756,70 +271764,70 @@
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
- /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
- /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal'
- /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
 /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
- ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' && ranlib liby.a )
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
- /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu'
 /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic'
+ /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
 /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic'
- /usr/bin/install -c -m 644 examples/c/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc'
- /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
+ /usr/bin/install -c -m 644 examples/c/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
+ /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
+ /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin'
 /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
- /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d'
 /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
- /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
+ ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' && ranlib liby.a )
 /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
+ /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
+ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d'
+ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
 /usr/bin/install -c -m 644 examples/d/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d'
- /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
 /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
+ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
+ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr'
 /usr/bin/install -c -m 644 examples/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
- /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java'
- /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr'
 /usr/bin/install -c -m 644 examples/java/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java'
+ /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc'
- /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc'
+ /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc'
+ /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
 /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
 /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc'
- /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
- /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
- /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
- /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
+ /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
+ /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc'
+ /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc'
 /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile examples/c/pushcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc'
 /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
- /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple'
+ /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
 /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons'
 /usr/bin/mkdir -p doc
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt'
- /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons'
 /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt'
+ /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons'
 LC_ALL=C tests/bison --version >doc/bison.help.tmp
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
+ /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
 LC_ALL=C tests/bison --help | \
 sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \
 -e '/translation bugs/d' >>doc/bison.help.tmp
- /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
 /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
 /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1'
@@ -271846,117 +271854,117 @@
 dh_perl
 dh_link
 dh_strip_nondeterminism
-Garbage at end of string in strptime: +02:00 at /usr/lib/i386-linux-gnu/perl/5.40/Time/Piece.pm line 598.
-Perhaps a format flag did not match the actual input? at /usr/lib/i386-linux-gnu/perl/5.40/Time/Piece.pm line 598.
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
+Garbage at end of string in strptime: +02:00 at /usr/lib/i386-linux-gnu/perl/5.40/Time/Piece.pm line 598.
+Perhaps a format flag did not match the actual input? at /usr/lib/i386-linux-gnu/perl/5.40/Time/Piece.pm line 598.
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo
 dh_compress
 dh_fixperms
 dh_missing
@@ -271978,12 +271986,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: including full source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/31488/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/44334 and its subdirectories
-I: Current time: Fri Jan 30 20:20:39 -12 2026
-I: pbuilder-time-stamp: 1769847639
+I: removing directory /srv/workspace/pbuilder/31488 and its subdirectories
+I: Current time: Sun Dec 29 16:20:05 +14 2024
+I: pbuilder-time-stamp: 1735438805